diff --git a/typedapi/api._.go b/typedapi/api._.go old mode 100755 new mode 100644 index af53bacdde..54250aa2c9 --- a/typedapi/api._.go +++ b/typedapi/api._.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package typedapi @@ -101,6 +101,7 @@ import ( core_get_script_context "github.com/elastic/go-elasticsearch/v8/typedapi/core/getscriptcontext" core_get_script_languages "github.com/elastic/go-elasticsearch/v8/typedapi/core/getscriptlanguages" core_get_source "github.com/elastic/go-elasticsearch/v8/typedapi/core/getsource" + core_health_report "github.com/elastic/go-elasticsearch/v8/typedapi/core/healthreport" core_index "github.com/elastic/go-elasticsearch/v8/typedapi/core/index" core_info "github.com/elastic/go-elasticsearch/v8/typedapi/core/info" core_knn_search "github.com/elastic/go-elasticsearch/v8/typedapi/core/knnsearch" @@ -315,6 +316,11 @@ import ( searchable_snapshots_clear_cache "github.com/elastic/go-elasticsearch/v8/typedapi/searchablesnapshots/clearcache" searchable_snapshots_mount "github.com/elastic/go-elasticsearch/v8/typedapi/searchablesnapshots/mount" searchable_snapshots_stats "github.com/elastic/go-elasticsearch/v8/typedapi/searchablesnapshots/stats" + search_application_delete "github.com/elastic/go-elasticsearch/v8/typedapi/searchapplication/delete" + search_application_get "github.com/elastic/go-elasticsearch/v8/typedapi/searchapplication/get" + search_application_list "github.com/elastic/go-elasticsearch/v8/typedapi/searchapplication/list" + search_application_put "github.com/elastic/go-elasticsearch/v8/typedapi/searchapplication/put" + search_application_search "github.com/elastic/go-elasticsearch/v8/typedapi/searchapplication/search" security_activate_user_profile "github.com/elastic/go-elasticsearch/v8/typedapi/security/activateuserprofile" security_authenticate "github.com/elastic/go-elasticsearch/v8/typedapi/security/authenticate" security_bulk_update_api_keys "github.com/elastic/go-elasticsearch/v8/typedapi/security/bulkupdateapikeys" @@ -409,6 +415,7 @@ import ( transform_preview_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/previewtransform" transform_put_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/puttransform" transform_reset_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/resettransform" + transform_schedule_now_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/schedulenowtransform" transform_start_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/starttransform" transform_stop_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/stoptransform" transform_update_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/updatetransform" @@ -428,7 +435,7 @@ import ( xpack_usage "github.com/elastic/go-elasticsearch/v8/typedapi/xpack/usage" ) -type Async struct { +type AsyncSearch struct { // Deletes an async search by ID. If the search is still running, the search // request will be cancelled. Otherwise, the saved search results are deleted. Delete async_search_delete.NewDelete @@ -633,6 +640,8 @@ type Core struct { GetScriptLanguages core_get_script_languages.NewGetScriptLanguages // Returns the source of a document. 
GetSource core_get_source.NewGetSource + // Returns the health of the cluster. + HealthReport core_health_report.NewHealthReport // Creates or updates a document in an index. Index core_index.NewIndex // Returns basic information about the cluster. @@ -694,7 +703,7 @@ type Core struct { UpdateByQueryRethrottle core_update_by_query_rethrottle.NewUpdateByQueryRethrottle } -type Dangling struct { +type DanglingIndices struct { // Deletes the specified dangling index DeleteDanglingIndex dangling_indices_delete_dangling_index.NewDeleteDanglingIndex // Imports the specified dangling index @@ -1145,7 +1154,20 @@ type Rollup struct { StopJob rollup_stop_job.NewStopJob } -type Searchable struct { +type SearchApplication struct { + // Deletes a search application. + Delete search_application_delete.NewDelete + // Returns the details about a search application. + Get search_application_get.NewGet + // Returns the existing search applications. + List search_application_list.NewList + // Creates or updates a search application. + Put search_application_put.NewPut + // Perform a search against a search application + Search search_application_search.NewSearch +} + +type SearchableSnapshots struct { // Retrieve node-level cache statistics about searchable snapshots. CacheStats searchable_snapshots_cache_stats.NewCacheStats // Clear the cache of searchable snapshots. @@ -1390,6 +1412,8 @@ type Transform struct { PutTransform transform_put_transform.NewPutTransform // Resets an existing transform. ResetTransform transform_reset_transform.NewResetTransform + // Schedules now a transform. + ScheduleNowTransform transform_schedule_now_transform.NewScheduleNowTransform // Starts one or more transforms. StartTransform transform_start_transform.NewStartTransform // Stops one or more transforms. @@ -1434,38 +1458,39 @@ type Xpack struct { } type API struct { - Async Async - Autoscaling Autoscaling - Cat Cat - Ccr Ccr - Cluster Cluster - Core Core - Dangling Dangling - Enrich Enrich - Eql Eql - Features Features - Fleet Fleet - Graph Graph - Ilm Ilm - Indices Indices - Ingest Ingest - License License - Logstash Logstash - Migration Migration - Ml Ml - Nodes Nodes - Rollup Rollup - Searchable Searchable - Security Security - Shutdown Shutdown - Slm Slm - Snapshot Snapshot - Sql Sql - Ssl Ssl - Tasks Tasks - Transform Transform - Watcher Watcher - Xpack Xpack + AsyncSearch AsyncSearch + Autoscaling Autoscaling + Cat Cat + Ccr Ccr + Cluster Cluster + Core Core + DanglingIndices DanglingIndices + Enrich Enrich + Eql Eql + Features Features + Fleet Fleet + Graph Graph + Ilm Ilm + Indices Indices + Ingest Ingest + License License + Logstash Logstash + Migration Migration + Ml Ml + Nodes Nodes + Rollup Rollup + SearchApplication SearchApplication + SearchableSnapshots SearchableSnapshots + Security Security + Shutdown Shutdown + Slm Slm + Snapshot Snapshot + Sql Sql + Ssl Ssl + Tasks Tasks + Transform Transform + Watcher Watcher + Xpack Xpack // Explicitly clears the search context for a scroll. ClearScroll core_clear_scroll.NewClearScroll @@ -1506,6 +1531,8 @@ type API struct { GetScriptLanguages core_get_script_languages.NewGetScriptLanguages // Returns the source of a document. GetSource core_get_source.NewGetSource + // Returns the health of the cluster. + HealthReport core_health_report.NewHealthReport // Creates or updates a document in an index. Index core_index.NewIndex // Returns basic information about the cluster. 
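Note on usage (not part of the generated diff): the hunks above rename the typed API namespaces Async, Dangling and Searchable to AsyncSearch, DanglingIndices and SearchableSnapshots, add the new SearchApplication namespace, and expose the new HealthReport and ScheduleNowTransform endpoints. The sketch below shows how calling code would pick these changes up. It assumes a typed client built with elasticsearch.NewTypedClient against a local cluster; the constructor arguments of the new builders and all response fields other than CompletionStatus (which appears later in this diff) are assumptions, not taken from the diff.

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Assumed setup: a typed client pointed at a local cluster.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}
	ctx := context.Background()

	// Previously es.Async.Status(...); after this change the namespace is AsyncSearch.
	status, err := es.AsyncSearch.Status("my-async-search-id").Do(ctx)
	if err == nil && status.CompletionStatus != nil {
		log.Printf("async search finished with HTTP status %d", *status.CompletionStatus)
	}

	// New health report endpoint, exposed both as es.HealthReport and es.Core.HealthReport.
	if _, err := es.HealthReport().Do(ctx); err != nil {
		log.Printf("health report failed: %v", err)
	}

	// New SearchApplication namespace exposing Delete, Get, List, Put and Search.
	if _, err := es.SearchApplication.List().Do(ctx); err != nil {
		log.Printf("listing search applications failed: %v", err)
	}
}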
@@ -1569,13 +1596,15 @@ type API struct { func New(tp elastictransport.Interface) *API { return &API{ - Async: Async{ + // AsyncSearch + AsyncSearch: AsyncSearch{ Delete: async_search_delete.NewDeleteFunc(tp), Get: async_search_get.NewGetFunc(tp), Status: async_search_status.NewStatusFunc(tp), Submit: async_search_submit.NewSubmitFunc(tp), }, + // Autoscaling Autoscaling: Autoscaling{ DeleteAutoscalingPolicy: autoscaling_delete_autoscaling_policy.NewDeleteAutoscalingPolicyFunc(tp), GetAutoscalingCapacity: autoscaling_get_autoscaling_capacity.NewGetAutoscalingCapacityFunc(tp), @@ -1583,6 +1612,7 @@ func New(tp elastictransport.Interface) *API { PutAutoscalingPolicy: autoscaling_put_autoscaling_policy.NewPutAutoscalingPolicyFunc(tp), }, + // Cat Cat: Cat{ Aliases: cat_aliases.NewAliasesFunc(tp), Allocation: cat_allocation.NewAllocationFunc(tp), @@ -1612,6 +1642,7 @@ func New(tp elastictransport.Interface) *API { Transforms: cat_transforms.NewTransformsFunc(tp), }, + // Ccr Ccr: Ccr{ DeleteAutoFollowPattern: ccr_delete_auto_follow_pattern.NewDeleteAutoFollowPatternFunc(tp), Follow: ccr_follow.NewFollowFunc(tp), @@ -1628,6 +1659,7 @@ func New(tp elastictransport.Interface) *API { Unfollow: ccr_unfollow.NewUnfollowFunc(tp), }, + // Cluster Cluster: Cluster{ AllocationExplain: cluster_allocation_explain.NewAllocationExplainFunc(tp), DeleteComponentTemplate: cluster_delete_component_template.NewDeleteComponentTemplateFunc(tp), @@ -1646,6 +1678,7 @@ func New(tp elastictransport.Interface) *API { Stats: cluster_stats.NewStatsFunc(tp), }, + // Core Core: Core{ ClearScroll: core_clear_scroll.NewClearScrollFunc(tp), ClosePointInTime: core_close_point_in_time.NewClosePointInTimeFunc(tp), @@ -1664,6 +1697,7 @@ func New(tp elastictransport.Interface) *API { GetScriptContext: core_get_script_context.NewGetScriptContextFunc(tp), GetScriptLanguages: core_get_script_languages.NewGetScriptLanguagesFunc(tp), GetSource: core_get_source.NewGetSourceFunc(tp), + HealthReport: core_health_report.NewHealthReportFunc(tp), Index: core_index.NewIndexFunc(tp), Info: core_info.NewInfoFunc(tp), KnnSearch: core_knn_search.NewKnnSearchFunc(tp), @@ -1689,12 +1723,14 @@ func New(tp elastictransport.Interface) *API { UpdateByQueryRethrottle: core_update_by_query_rethrottle.NewUpdateByQueryRethrottleFunc(tp), }, - Dangling: Dangling{ + // DanglingIndices + DanglingIndices: DanglingIndices{ DeleteDanglingIndex: dangling_indices_delete_dangling_index.NewDeleteDanglingIndexFunc(tp), ImportDanglingIndex: dangling_indices_import_dangling_index.NewImportDanglingIndexFunc(tp), ListDanglingIndices: dangling_indices_list_dangling_indices.NewListDanglingIndicesFunc(tp), }, + // Enrich Enrich: Enrich{ DeletePolicy: enrich_delete_policy.NewDeletePolicyFunc(tp), ExecutePolicy: enrich_execute_policy.NewExecutePolicyFunc(tp), @@ -1703,6 +1739,7 @@ func New(tp elastictransport.Interface) *API { Stats: enrich_stats.NewStatsFunc(tp), }, + // Eql Eql: Eql{ Delete: eql_delete.NewDeleteFunc(tp), Get: eql_get.NewGetFunc(tp), @@ -1710,20 +1747,24 @@ func New(tp elastictransport.Interface) *API { Search: eql_search.NewSearchFunc(tp), }, + // Features Features: Features{ GetFeatures: features_get_features.NewGetFeaturesFunc(tp), ResetFeatures: features_reset_features.NewResetFeaturesFunc(tp), }, + // Fleet Fleet: Fleet{ GlobalCheckpoints: fleet_global_checkpoints.NewGlobalCheckpointsFunc(tp), Search: fleet_search.NewSearchFunc(tp), }, + // Graph Graph: Graph{ Explore: graph_explore.NewExploreFunc(tp), }, + // Ilm Ilm: Ilm{ DeleteLifecycle: 
ilm_delete_lifecycle.NewDeleteLifecycleFunc(tp), ExplainLifecycle: ilm_explain_lifecycle.NewExplainLifecycleFunc(tp), @@ -1738,6 +1779,7 @@ func New(tp elastictransport.Interface) *API { Stop: ilm_stop.NewStopFunc(tp), }, + // Indices Indices: Indices{ AddBlock: indices_add_block.NewAddBlockFunc(tp), Analyze: indices_analyze.NewAnalyzeFunc(tp), @@ -1795,6 +1837,7 @@ func New(tp elastictransport.Interface) *API { ValidateQuery: indices_validate_query.NewValidateQueryFunc(tp), }, + // Ingest Ingest: Ingest{ DeletePipeline: ingest_delete_pipeline.NewDeletePipelineFunc(tp), GeoIpStats: ingest_geo_ip_stats.NewGeoIpStatsFunc(tp), @@ -1804,6 +1847,7 @@ func New(tp elastictransport.Interface) *API { Simulate: ingest_simulate.NewSimulateFunc(tp), }, + // License License: License{ Delete: license_delete.NewDeleteFunc(tp), Get: license_get.NewGetFunc(tp), @@ -1814,18 +1858,21 @@ func New(tp elastictransport.Interface) *API { PostStartTrial: license_post_start_trial.NewPostStartTrialFunc(tp), }, + // Logstash Logstash: Logstash{ DeletePipeline: logstash_delete_pipeline.NewDeletePipelineFunc(tp), GetPipeline: logstash_get_pipeline.NewGetPipelineFunc(tp), PutPipeline: logstash_put_pipeline.NewPutPipelineFunc(tp), }, + // Migration Migration: Migration{ Deprecations: migration_deprecations.NewDeprecationsFunc(tp), GetFeatureUpgradeStatus: migration_get_feature_upgrade_status.NewGetFeatureUpgradeStatusFunc(tp), PostFeatureUpgrade: migration_post_feature_upgrade.NewPostFeatureUpgradeFunc(tp), }, + // Ml Ml: Ml{ ClearTrainedModelDeploymentCache: ml_clear_trained_model_deployment_cache.NewClearTrainedModelDeploymentCacheFunc(tp), CloseJob: ml_close_job.NewCloseJobFunc(tp), @@ -1900,6 +1947,7 @@ func New(tp elastictransport.Interface) *API { ValidateDetector: ml_validate_detector.NewValidateDetectorFunc(tp), }, + // Nodes Nodes: Nodes{ ClearRepositoriesMeteringArchive: nodes_clear_repositories_metering_archive.NewClearRepositoriesMeteringArchiveFunc(tp), GetRepositoriesMeteringInfo: nodes_get_repositories_metering_info.NewGetRepositoriesMeteringInfoFunc(tp), @@ -1910,6 +1958,7 @@ func New(tp elastictransport.Interface) *API { Usage: nodes_usage.NewUsageFunc(tp), }, + // Rollup Rollup: Rollup{ DeleteJob: rollup_delete_job.NewDeleteJobFunc(tp), GetJobs: rollup_get_jobs.NewGetJobsFunc(tp), @@ -1921,13 +1970,24 @@ func New(tp elastictransport.Interface) *API { StopJob: rollup_stop_job.NewStopJobFunc(tp), }, - Searchable: Searchable{ + // SearchApplication + SearchApplication: SearchApplication{ + Delete: search_application_delete.NewDeleteFunc(tp), + Get: search_application_get.NewGetFunc(tp), + List: search_application_list.NewListFunc(tp), + Put: search_application_put.NewPutFunc(tp), + Search: search_application_search.NewSearchFunc(tp), + }, + + // SearchableSnapshots + SearchableSnapshots: SearchableSnapshots{ CacheStats: searchable_snapshots_cache_stats.NewCacheStatsFunc(tp), ClearCache: searchable_snapshots_clear_cache.NewClearCacheFunc(tp), Mount: searchable_snapshots_mount.NewMountFunc(tp), Stats: searchable_snapshots_stats.NewStatsFunc(tp), }, + // Security Security: Security{ ActivateUserProfile: security_activate_user_profile.NewActivateUserProfileFunc(tp), Authenticate: security_authenticate.NewAuthenticateFunc(tp), @@ -1986,12 +2046,14 @@ func New(tp elastictransport.Interface) *API { UpdateUserProfileData: security_update_user_profile_data.NewUpdateUserProfileDataFunc(tp), }, + // Shutdown Shutdown: Shutdown{ DeleteNode: shutdown_delete_node.NewDeleteNodeFunc(tp), GetNode: 
shutdown_get_node.NewGetNodeFunc(tp), PutNode: shutdown_put_node.NewPutNodeFunc(tp), }, + // Slm Slm: Slm{ DeleteLifecycle: slm_delete_lifecycle.NewDeleteLifecycleFunc(tp), ExecuteLifecycle: slm_execute_lifecycle.NewExecuteLifecycleFunc(tp), @@ -2004,6 +2066,7 @@ func New(tp elastictransport.Interface) *API { Stop: slm_stop.NewStopFunc(tp), }, + // Snapshot Snapshot: Snapshot{ CleanupRepository: snapshot_cleanup_repository.NewCleanupRepositoryFunc(tp), Clone: snapshot_clone.NewCloneFunc(tp), @@ -2018,6 +2081,7 @@ func New(tp elastictransport.Interface) *API { VerifyRepository: snapshot_verify_repository.NewVerifyRepositoryFunc(tp), }, + // Sql Sql: Sql{ ClearCursor: sql_clear_cursor.NewClearCursorFunc(tp), DeleteAsync: sql_delete_async.NewDeleteAsyncFunc(tp), @@ -2027,29 +2091,34 @@ func New(tp elastictransport.Interface) *API { Translate: sql_translate.NewTranslateFunc(tp), }, + // Ssl Ssl: Ssl{ Certificates: ssl_certificates.NewCertificatesFunc(tp), }, + // Tasks Tasks: Tasks{ Cancel: tasks_cancel.NewCancelFunc(tp), Get: tasks_get.NewGetFunc(tp), List: tasks_list.NewListFunc(tp), }, + // Transform Transform: Transform{ - DeleteTransform: transform_delete_transform.NewDeleteTransformFunc(tp), - GetTransform: transform_get_transform.NewGetTransformFunc(tp), - GetTransformStats: transform_get_transform_stats.NewGetTransformStatsFunc(tp), - PreviewTransform: transform_preview_transform.NewPreviewTransformFunc(tp), - PutTransform: transform_put_transform.NewPutTransformFunc(tp), - ResetTransform: transform_reset_transform.NewResetTransformFunc(tp), - StartTransform: transform_start_transform.NewStartTransformFunc(tp), - StopTransform: transform_stop_transform.NewStopTransformFunc(tp), - UpdateTransform: transform_update_transform.NewUpdateTransformFunc(tp), - UpgradeTransforms: transform_upgrade_transforms.NewUpgradeTransformsFunc(tp), + DeleteTransform: transform_delete_transform.NewDeleteTransformFunc(tp), + GetTransform: transform_get_transform.NewGetTransformFunc(tp), + GetTransformStats: transform_get_transform_stats.NewGetTransformStatsFunc(tp), + PreviewTransform: transform_preview_transform.NewPreviewTransformFunc(tp), + PutTransform: transform_put_transform.NewPutTransformFunc(tp), + ResetTransform: transform_reset_transform.NewResetTransformFunc(tp), + ScheduleNowTransform: transform_schedule_now_transform.NewScheduleNowTransformFunc(tp), + StartTransform: transform_start_transform.NewStartTransformFunc(tp), + StopTransform: transform_stop_transform.NewStopTransformFunc(tp), + UpdateTransform: transform_update_transform.NewUpdateTransformFunc(tp), + UpgradeTransforms: transform_upgrade_transforms.NewUpgradeTransformsFunc(tp), }, + // Watcher Watcher: Watcher{ AckWatch: watcher_ack_watch.NewAckWatchFunc(tp), ActivateWatch: watcher_activate_watch.NewActivateWatchFunc(tp), @@ -2064,6 +2133,7 @@ func New(tp elastictransport.Interface) *API { Stop: watcher_stop.NewStopFunc(tp), }, + // Xpack Xpack: Xpack{ Info: xpack_info.NewInfoFunc(tp), Usage: xpack_usage.NewUsageFunc(tp), @@ -2086,6 +2156,7 @@ func New(tp elastictransport.Interface) *API { GetScriptContext: core_get_script_context.NewGetScriptContextFunc(tp), GetScriptLanguages: core_get_script_languages.NewGetScriptLanguagesFunc(tp), GetSource: core_get_source.NewGetSourceFunc(tp), + HealthReport: core_health_report.NewHealthReportFunc(tp), Index: core_index.NewIndexFunc(tp), Info: core_info.NewInfoFunc(tp), KnnSearch: core_knn_search.NewKnnSearchFunc(tp), diff --git a/typedapi/asyncsearch/delete/delete.go 
b/typedapi/asyncsearch/delete/delete.go old mode 100755 new mode 100644 index 3929561f10..77cc3f9227 --- a/typedapi/asyncsearch/delete/delete.go +++ b/typedapi/asyncsearch/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an async search by ID. If the search is still running, the search // request will be cancelled. Otherwise, the saved search results are deleted. @@ -170,7 +170,6 @@ func (r Delete) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/asyncsearch/delete/response.go b/typedapi/asyncsearch/delete/response.go old mode 100755 new mode 100644 index 3bd41504db..cf333699e0 --- a/typedapi/asyncsearch/delete/response.go +++ b/typedapi/asyncsearch/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/async_search/delete/AsyncSearchDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/async_search/delete/AsyncSearchDeleteResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/asyncsearch/get/get.go b/typedapi/asyncsearch/get/get.go old mode 100755 new mode 100644 index e51ba6ae7d..ba2ccebf9e --- a/typedapi/asyncsearch/get/get.go +++ b/typedapi/asyncsearch/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves the results of a previously submitted async search request given // its ID. @@ -173,7 +173,6 @@ func (r Get) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/asyncsearch/get/response.go b/typedapi/asyncsearch/get/response.go old mode 100755 new mode 100644 index 892060ab01..44cc91c1e3 --- a/typedapi/asyncsearch/get/response.go +++ b/typedapi/asyncsearch/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/async_search/get/AsyncSearchGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/async_search/get/AsyncSearchGetResponse.ts#L22-L24 type Response struct { Response types.AsyncSearch `json:"response"` diff --git a/typedapi/asyncsearch/status/response.go b/typedapi/asyncsearch/status/response.go old mode 100755 new mode 100644 index c544215897..fb93e71ce9 --- a/typedapi/asyncsearch/status/response.go +++ b/typedapi/asyncsearch/status/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package status @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package status // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/async_search/status/AsyncSearchStatusResponse.ts#L28-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/async_search/status/AsyncSearchStatusResponse.ts#L28-L30 type Response struct { CompletionStatus *int `json:"completion_status,omitempty"` diff --git a/typedapi/asyncsearch/status/status.go b/typedapi/asyncsearch/status/status.go old mode 100755 new mode 100644 index c238f516d1..fff86fa594 --- a/typedapi/asyncsearch/status/status.go +++ b/typedapi/asyncsearch/status/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves the status of a previously submitted async search request given its // ID. @@ -172,7 +172,6 @@ func (r Status) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/asyncsearch/submit/request.go b/typedapi/asyncsearch/submit/request.go old mode 100755 new mode 100644 index 93d9af7008..dd9365a9a4 --- a/typedapi/asyncsearch/submit/request.go +++ b/typedapi/asyncsearch/submit/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package submit @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package submit // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/async_search/submit/AsyncSearchSubmitRequest.ts#L55-L255 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/async_search/submit/AsyncSearchSubmitRequest.ts#L55-L255 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -53,7 +53,7 @@ type Request struct { // IndicesBoost Boosts the _score of documents from specified indices. IndicesBoost []map[string]types.Float64 `json:"indices_boost,omitempty"` // Knn Defines the approximate kNN search to run. - Knn *types.KnnQuery `json:"knn,omitempty"` + Knn []types.KnnQuery `json:"knn,omitempty"` // MinScore Minimum _score for matching documents. Documents with a lower _score are // not included in the search results. MinScore *types.Float64 `json:"min_score,omitempty"` @@ -67,7 +67,7 @@ type Request struct { Rescore []types.Rescore `json:"rescore,omitempty"` // RuntimeMappings Defines one or more runtime fields in the search request. These fields take // precedence over mapped fields with the same name. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // ScriptFields Retrieve a script evaluation (based on different fields) for each hit. ScriptFields map[string]types.ScriptField `json:"script_fields,omitempty"` SearchAfter []types.FieldValue `json:"search_after,omitempty"` diff --git a/typedapi/asyncsearch/submit/response.go b/typedapi/asyncsearch/submit/response.go old mode 100755 new mode 100644 index 42073a2685..363c61dd3c --- a/typedapi/asyncsearch/submit/response.go +++ b/typedapi/asyncsearch/submit/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package submit @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package submit // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/async_search/submit/AsyncSearchSubmitResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/async_search/submit/AsyncSearchSubmitResponse.ts#L22-L24 type Response struct { Response types.AsyncSearch `json:"response"` diff --git a/typedapi/asyncsearch/submit/submit.go b/typedapi/asyncsearch/submit/submit.go old mode 100755 new mode 100644 index 9b898b4165..749ea2e600 --- a/typedapi/asyncsearch/submit/submit.go +++ b/typedapi/asyncsearch/submit/submit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Executes a search request asynchronously. package submit @@ -213,7 +213,6 @@ func (r Submit) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go b/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go old mode 100755 new mode 100644 index 51e8b171f2..6ddd6511ed --- a/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go +++ b/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. // Direct use is not supported. @@ -172,7 +172,6 @@ func (r DeleteAutoscalingPolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/autoscaling/deleteautoscalingpolicy/response.go b/typedapi/autoscaling/deleteautoscalingpolicy/response.go old mode 100755 new mode 100644 index 5af053d691..838dadcb9f --- a/typedapi/autoscaling/deleteautoscalingpolicy/response.go +++ b/typedapi/autoscaling/deleteautoscalingpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteautoscalingpolicy // Response holds the response body struct for the package deleteautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/delete_autoscaling_policy/DeleteAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/delete_autoscaling_policy/DeleteAutoscalingPolicyResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go b/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go old mode 100755 new mode 100644 index 867c476235..8541f1893b --- a/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go +++ b/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets the current autoscaling capacity based on the configured autoscaling // policy. Designed for indirect use by ECE/ESS and ECK. 
Direct use is not @@ -163,7 +163,6 @@ func (r GetAutoscalingCapacity) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/autoscaling/getautoscalingcapacity/response.go b/typedapi/autoscaling/getautoscalingcapacity/response.go old mode 100755 new mode 100644 index 54fcdab6d0..e0c3d740c6 --- a/typedapi/autoscaling/getautoscalingcapacity/response.go +++ b/typedapi/autoscaling/getautoscalingcapacity/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getautoscalingcapacity @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautoscalingcapacity // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L25-L29 type Response struct { Policies map[string]types.AutoscalingDeciders `json:"policies"` diff --git a/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go b/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go old mode 100755 new mode 100644 index f7a42557fe..a1ed72ccec --- a/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go +++ b/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and // ECK. Direct use is not supported. @@ -172,7 +172,6 @@ func (r GetAutoscalingPolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/autoscaling/getautoscalingpolicy/response.go b/typedapi/autoscaling/getautoscalingpolicy/response.go old mode 100755 new mode 100644 index 103ec9ced1..76a633475b --- a/typedapi/autoscaling/getautoscalingpolicy/response.go +++ b/typedapi/autoscaling/getautoscalingpolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getautoscalingpolicy @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package getautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_policy/GetAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_policy/GetAutoscalingPolicyResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go b/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go old mode 100755 new mode 100644 index 1562a941a5..f514f22847 --- a/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go +++ b/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and // ECK. Direct use is not supported. @@ -207,7 +207,6 @@ func (r PutAutoscalingPolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/autoscaling/putautoscalingpolicy/response.go b/typedapi/autoscaling/putautoscalingpolicy/response.go old mode 100755 new mode 100644 index 3373fbb2ff..3784250b2b --- a/typedapi/autoscaling/putautoscalingpolicy/response.go +++ b/typedapi/autoscaling/putautoscalingpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putautoscalingpolicy // Response holds the response body struct for the package putautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/cat/aliases/aliases.go b/typedapi/cat/aliases/aliases.go old mode 100755 new mode 100644 index 07eee4b0c5..3bc36ad1df --- a/typedapi/cat/aliases/aliases.go +++ b/typedapi/cat/aliases/aliases.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Shows information about currently configured aliases to indices including // filter and routing infos. 
@@ -177,7 +177,6 @@ func (r Aliases) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/aliases/response.go b/typedapi/cat/aliases/response.go old mode 100755 new mode 100644 index 326af52478..9d11d16271 --- a/typedapi/cat/aliases/response.go +++ b/typedapi/cat/aliases/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package aliases @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package aliases // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/aliases/CatAliasesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/aliases/CatAliasesResponse.ts#L22-L24 type Response []types.AliasesRecord diff --git a/typedapi/cat/allocation/allocation.go b/typedapi/cat/allocation/allocation.go old mode 100755 new mode 100644 index fb936fa6a1..62bef4983b --- a/typedapi/cat/allocation/allocation.go +++ b/typedapi/cat/allocation/allocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides a snapshot of how many shards are allocated to each data node and // how much disk space they are using. @@ -179,7 +179,6 @@ func (r Allocation) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/allocation/response.go b/typedapi/cat/allocation/response.go old mode 100755 new mode 100644 index 5c926255fe..f83e93e421 --- a/typedapi/cat/allocation/response.go +++ b/typedapi/cat/allocation/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package allocation @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package allocation // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/allocation/CatAllocationResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/allocation/CatAllocationResponse.ts#L22-L24 type Response []types.AllocationRecord diff --git a/typedapi/cat/componenttemplates/component_templates.go b/typedapi/cat/componenttemplates/component_templates.go old mode 100755 new mode 100644 index 2e4843bc44..8ac2f2989c --- a/typedapi/cat/componenttemplates/component_templates.go +++ b/typedapi/cat/componenttemplates/component_templates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about existing component_templates templates. package componenttemplates @@ -73,7 +73,7 @@ func NewComponentTemplatesFunc(tp elastictransport.Interface) NewComponentTempla // Returns information about existing component_templates templates. // -// https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-compoentn-templates.html +// https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-component-templates.html func New(tp elastictransport.Interface) *ComponentTemplates { r := &ComponentTemplates{ transport: tp, @@ -175,7 +175,6 @@ func (r ComponentTemplates) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/componenttemplates/response.go b/typedapi/cat/componenttemplates/response.go old mode 100755 new mode 100644 index 4550c6781f..14c40e17f1 --- a/typedapi/cat/componenttemplates/response.go +++ b/typedapi/cat/componenttemplates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package componenttemplates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package componenttemplates // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/component_templates/CatComponentTemplatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/component_templates/CatComponentTemplatesResponse.ts#L22-L24 type Response []types.CatComponentTemplate diff --git a/typedapi/cat/count/count.go b/typedapi/cat/count/count.go old mode 100755 new mode 100644 index 1264fbdd0b..228e9c86d0 --- a/typedapi/cat/count/count.go +++ b/typedapi/cat/count/count.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides quick access to the document count of the entire cluster, or // individual indices. @@ -177,7 +177,6 @@ func (r Count) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/count/response.go b/typedapi/cat/count/response.go old mode 100755 new mode 100644 index 626795ea0e..bbffa736e3 --- a/typedapi/cat/count/response.go +++ b/typedapi/cat/count/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package count @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/count/CatCountResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/count/CatCountResponse.ts#L22-L24 type Response []types.CountRecord diff --git a/typedapi/cat/fielddata/fielddata.go b/typedapi/cat/fielddata/fielddata.go old mode 100755 new mode 100644 index 88f2fbe858..37a267e890 --- a/typedapi/cat/fielddata/fielddata.go +++ b/typedapi/cat/fielddata/fielddata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Shows how much heap memory is currently being used by fielddata on every data // node in the cluster. @@ -179,7 +179,6 @@ func (r Fielddata) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/fielddata/response.go b/typedapi/cat/fielddata/response.go old mode 100755 new mode 100644 index 65e35db732..f214a686b3 --- a/typedapi/cat/fielddata/response.go +++ b/typedapi/cat/fielddata/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package fielddata @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package fielddata // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/fielddata/CatFielddataResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/fielddata/CatFielddataResponse.ts#L22-L24 type Response []types.FielddataRecord diff --git a/typedapi/cat/health/health.go b/typedapi/cat/health/health.go old mode 100755 new mode 100644 index 8b8062dae7..fdc075e18c --- a/typedapi/cat/health/health.go +++ b/typedapi/cat/health/health.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a concise representation of the cluster health. package health @@ -160,7 +160,6 @@ func (r Health) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/health/response.go b/typedapi/cat/health/response.go old mode 100755 new mode 100644 index c6560d09c3..62680e8e36 --- a/typedapi/cat/health/response.go +++ b/typedapi/cat/health/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package health @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package health // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/health/CatHealthResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/health/CatHealthResponse.ts#L22-L24 type Response []types.HealthRecord diff --git a/typedapi/cat/help/help.go b/typedapi/cat/help/help.go old mode 100755 new mode 100644 index 7c2972addc..d832c89d67 --- a/typedapi/cat/help/help.go +++ b/typedapi/cat/help/help.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns help for the Cat APIs. package help @@ -157,7 +157,6 @@ func (r Help) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/help/response.go b/typedapi/cat/help/response.go old mode 100755 new mode 100644 index d5cbd049ea..97c464595d --- a/typedapi/cat/help/response.go +++ b/typedapi/cat/help/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package help @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package help // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/help/CatHelpResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/help/CatHelpResponse.ts#L22-L24 type Response []types.HelpRecord diff --git a/typedapi/cat/indices/indices.go b/typedapi/cat/indices/indices.go old mode 100755 new mode 100644 index c8b077e540..ec4c7a4e19 --- a/typedapi/cat/indices/indices.go +++ b/typedapi/cat/indices/indices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about indices: number of primaries and replicas, document // counts, disk size, ... @@ -182,7 +182,6 @@ func (r Indices) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/indices/response.go b/typedapi/cat/indices/response.go old mode 100755 new mode 100644 index cd4428cb78..f1f19b40ac --- a/typedapi/cat/indices/response.go +++ b/typedapi/cat/indices/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package indices @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package indices // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/indices/CatIndicesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/indices/CatIndicesResponse.ts#L22-L24 type Response []types.IndicesRecord diff --git a/typedapi/cat/master/master.go b/typedapi/cat/master/master.go old mode 100755 new mode 100644 index 6b2bd82eaa..fe4401e6c4 --- a/typedapi/cat/master/master.go +++ b/typedapi/cat/master/master.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about the master node. package master @@ -159,7 +159,6 @@ func (r Master) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/master/response.go b/typedapi/cat/master/response.go old mode 100755 new mode 100644 index 1d7370cce9..6bcb8fa537 --- a/typedapi/cat/master/response.go +++ b/typedapi/cat/master/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package master @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package master // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/master/CatMasterResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/master/CatMasterResponse.ts#L22-L24 type Response []types.MasterRecord diff --git a/typedapi/cat/mldatafeeds/ml_datafeeds.go b/typedapi/cat/mldatafeeds/ml_datafeeds.go old mode 100755 new mode 100644 index bf25621045..2b0c0ffdcb --- a/typedapi/cat/mldatafeeds/ml_datafeeds.go +++ b/typedapi/cat/mldatafeeds/ml_datafeeds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets configuration and usage information about datafeeds. package mldatafeeds @@ -182,7 +182,6 @@ func (r MlDatafeeds) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/mldatafeeds/response.go b/typedapi/cat/mldatafeeds/response.go old mode 100755 new mode 100644 index a534d686c2..01db47e1e8 --- a/typedapi/cat/mldatafeeds/response.go +++ b/typedapi/cat/mldatafeeds/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mldatafeeds @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mldatafeeds // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_datafeeds/CatDatafeedsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_datafeeds/CatDatafeedsResponse.ts#L22-L24 type Response []types.DatafeedsRecord diff --git a/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go b/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go old mode 100755 new mode 100644 index 10e7713744..8f232a5d48 --- a/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go +++ b/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets configuration and usage information about data frame analytics jobs. package mldataframeanalytics @@ -186,7 +186,6 @@ func (r MlDataFrameAnalytics) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/mldataframeanalytics/response.go b/typedapi/cat/mldataframeanalytics/response.go old mode 100755 new mode 100644 index d6aff44efd..279c8ab6b9 --- a/typedapi/cat/mldataframeanalytics/response.go +++ b/typedapi/cat/mldataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mldataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mldataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_data_frame_analytics/CatDataFrameAnalyticsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_data_frame_analytics/CatDataFrameAnalyticsResponse.ts#L22-L24 type Response []types.DataFrameAnalyticsRecord diff --git a/typedapi/cat/mljobs/ml_jobs.go b/typedapi/cat/mljobs/ml_jobs.go old mode 100755 new mode 100644 index 2babd1afa1..d6129bc937 --- a/typedapi/cat/mljobs/ml_jobs.go +++ b/typedapi/cat/mljobs/ml_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets configuration and usage information about anomaly detection jobs. 
package mljobs @@ -183,7 +183,6 @@ func (r MlJobs) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/mljobs/response.go b/typedapi/cat/mljobs/response.go old mode 100755 new mode 100644 index 1b3ea5a853..1375376b2c --- a/typedapi/cat/mljobs/response.go +++ b/typedapi/cat/mljobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mljobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mljobs // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_jobs/CatJobsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_jobs/CatJobsResponse.ts#L22-L24 type Response []types.JobsRecord diff --git a/typedapi/cat/mltrainedmodels/ml_trained_models.go b/typedapi/cat/mltrainedmodels/ml_trained_models.go old mode 100755 new mode 100644 index d2e8014724..0175f3347e --- a/typedapi/cat/mltrainedmodels/ml_trained_models.go +++ b/typedapi/cat/mltrainedmodels/ml_trained_models.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets configuration and usage information about inference trained models. package mltrainedmodels @@ -182,7 +182,6 @@ func (r MlTrainedModels) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/mltrainedmodels/response.go b/typedapi/cat/mltrainedmodels/response.go old mode 100755 new mode 100644 index c68f3d82b6..55351c3efc --- a/typedapi/cat/mltrainedmodels/response.go +++ b/typedapi/cat/mltrainedmodels/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mltrainedmodels @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mltrainedmodels // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_trained_models/CatTrainedModelsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_trained_models/CatTrainedModelsResponse.ts#L22-L24 type Response []types.TrainedModelsRecord diff --git a/typedapi/cat/nodeattrs/nodeattrs.go b/typedapi/cat/nodeattrs/nodeattrs.go old mode 100755 new mode 100644 index e1f32734ef..d5d5b698fb --- a/typedapi/cat/nodeattrs/nodeattrs.go +++ b/typedapi/cat/nodeattrs/nodeattrs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about custom node attributes. package nodeattrs @@ -159,7 +159,6 @@ func (r Nodeattrs) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/nodeattrs/response.go b/typedapi/cat/nodeattrs/response.go old mode 100755 new mode 100644 index f058d5d3a6..d555f08858 --- a/typedapi/cat/nodeattrs/response.go +++ b/typedapi/cat/nodeattrs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package nodeattrs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package nodeattrs // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/nodeattrs/CatNodeAttributesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/nodeattrs/CatNodeAttributesResponse.ts#L22-L24 type Response []types.NodeAttributesRecord diff --git a/typedapi/cat/nodes/nodes.go b/typedapi/cat/nodes/nodes.go old mode 100755 new mode 100644 index a0341df72c..d5c3dcb1c0 --- a/typedapi/cat/nodes/nodes.go +++ b/typedapi/cat/nodes/nodes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns basic statistics about performance of cluster nodes. package nodes @@ -161,7 +161,6 @@ func (r Nodes) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/nodes/response.go b/typedapi/cat/nodes/response.go old mode 100755 new mode 100644 index be9bed2ca4..7067c4efc5 --- a/typedapi/cat/nodes/response.go +++ b/typedapi/cat/nodes/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package nodes @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package nodes // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/nodes/CatNodesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/nodes/CatNodesResponse.ts#L22-L24 type Response []types.NodesRecord diff --git a/typedapi/cat/pendingtasks/pending_tasks.go b/typedapi/cat/pendingtasks/pending_tasks.go old mode 100755 new mode 100644 index ed8d418634..ee6e202d98 --- a/typedapi/cat/pendingtasks/pending_tasks.go +++ b/typedapi/cat/pendingtasks/pending_tasks.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a concise representation of the cluster pending tasks. package pendingtasks @@ -159,7 +159,6 @@ func (r PendingTasks) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/pendingtasks/response.go b/typedapi/cat/pendingtasks/response.go old mode 100755 new mode 100644 index 21277ea067..b355102d3a --- a/typedapi/cat/pendingtasks/response.go +++ b/typedapi/cat/pendingtasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package pendingtasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package pendingtasks // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/pending_tasks/CatPendingTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/pending_tasks/CatPendingTasksResponse.ts#L22-L24 type Response []types.PendingTasksRecord diff --git a/typedapi/cat/plugins/plugins.go b/typedapi/cat/plugins/plugins.go old mode 100755 new mode 100644 index 930b8168d2..b291f90ee4 --- a/typedapi/cat/plugins/plugins.go +++ b/typedapi/cat/plugins/plugins.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about installed plugins across nodes node. package plugins @@ -159,7 +159,6 @@ func (r Plugins) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/plugins/response.go b/typedapi/cat/plugins/response.go old mode 100755 new mode 100644 index 38360ce557..77cd8d1da4 --- a/typedapi/cat/plugins/response.go +++ b/typedapi/cat/plugins/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package plugins @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package plugins // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/plugins/CatPluginsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/plugins/CatPluginsResponse.ts#L22-L24 type Response []types.PluginsRecord diff --git a/typedapi/cat/recovery/recovery.go b/typedapi/cat/recovery/recovery.go old mode 100755 new mode 100644 index 5223542162..a9afba974e --- a/typedapi/cat/recovery/recovery.go +++ b/typedapi/cat/recovery/recovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about index shard recoveries, both on-going completed. package recovery @@ -178,7 +178,6 @@ func (r Recovery) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/recovery/response.go b/typedapi/cat/recovery/response.go old mode 100755 new mode 100644 index d4f49765e2..2a4f4ef1dc --- a/typedapi/cat/recovery/response.go +++ b/typedapi/cat/recovery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package recovery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package recovery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/recovery/CatRecoveryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/recovery/CatRecoveryResponse.ts#L22-L24 type Response []types.RecoveryRecord diff --git a/typedapi/cat/repositories/repositories.go b/typedapi/cat/repositories/repositories.go old mode 100755 new mode 100644 index 62924238df..6ddb32d26d --- a/typedapi/cat/repositories/repositories.go +++ b/typedapi/cat/repositories/repositories.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about snapshot repositories registered in the cluster. 
package repositories @@ -159,7 +159,6 @@ func (r Repositories) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/repositories/response.go b/typedapi/cat/repositories/response.go old mode 100755 new mode 100644 index 751286a98d..60b6ad5c80 --- a/typedapi/cat/repositories/response.go +++ b/typedapi/cat/repositories/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package repositories @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package repositories // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/repositories/CatRepositoriesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/repositories/CatRepositoriesResponse.ts#L22-L24 type Response []types.RepositoriesRecord diff --git a/typedapi/cat/segments/response.go b/typedapi/cat/segments/response.go old mode 100755 new mode 100644 index ea732380d0..6b8616c58b --- a/typedapi/cat/segments/response.go +++ b/typedapi/cat/segments/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package segments @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package segments // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/segments/CatSegmentsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/segments/CatSegmentsResponse.ts#L22-L24 type Response []types.SegmentsRecord diff --git a/typedapi/cat/segments/segments.go b/typedapi/cat/segments/segments.go old mode 100755 new mode 100644 index 42d54a3406..d019cdd28a --- a/typedapi/cat/segments/segments.go +++ b/typedapi/cat/segments/segments.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides low-level information about the segments in the shards of an index. package segments @@ -177,7 +177,6 @@ func (r Segments) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/shards/response.go b/typedapi/cat/shards/response.go old mode 100755 new mode 100644 index 50a9eed63a..8cd012b97c --- a/typedapi/cat/shards/response.go +++ b/typedapi/cat/shards/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package shards @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package shards // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/shards/CatShardsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/shards/CatShardsResponse.ts#L22-L24 type Response []types.ShardsRecord diff --git a/typedapi/cat/shards/shards.go b/typedapi/cat/shards/shards.go old mode 100755 new mode 100644 index 41195051e3..4da887be79 --- a/typedapi/cat/shards/shards.go +++ b/typedapi/cat/shards/shards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides a detailed view of shard allocation on nodes. package shards @@ -177,7 +177,6 @@ func (r Shards) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/snapshots/response.go b/typedapi/cat/snapshots/response.go old mode 100755 new mode 100644 index d361c00460..21b49f5e2a --- a/typedapi/cat/snapshots/response.go +++ b/typedapi/cat/snapshots/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package snapshots @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package snapshots // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/snapshots/CatSnapshotsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/snapshots/CatSnapshotsResponse.ts#L22-L24 type Response []types.SnapshotsRecord diff --git a/typedapi/cat/snapshots/snapshots.go b/typedapi/cat/snapshots/snapshots.go old mode 100755 new mode 100644 index 40a8c30297..aa226c4e5b --- a/typedapi/cat/snapshots/snapshots.go +++ b/typedapi/cat/snapshots/snapshots.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns all snapshots in a specific repository. package snapshots @@ -176,7 +176,6 @@ func (r Snapshots) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/tasks/response.go b/typedapi/cat/tasks/response.go old mode 100755 new mode 100644 index cb17027f8d..61842808c7 --- a/typedapi/cat/tasks/response.go +++ b/typedapi/cat/tasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package tasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package tasks // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/tasks/CatTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/tasks/CatTasksResponse.ts#L22-L24 type Response []types.TasksRecord diff --git a/typedapi/cat/tasks/tasks.go b/typedapi/cat/tasks/tasks.go old mode 100755 new mode 100644 index f9c6eecea5..785a4f903a --- a/typedapi/cat/tasks/tasks.go +++ b/typedapi/cat/tasks/tasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about the tasks currently executing on one or more nodes // in the cluster. @@ -162,7 +162,6 @@ func (r Tasks) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/templates/response.go b/typedapi/cat/templates/response.go old mode 100755 new mode 100644 index fd67ca4de4..44151b0621 --- a/typedapi/cat/templates/response.go +++ b/typedapi/cat/templates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package templates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package templates // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/templates/CatTemplatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/templates/CatTemplatesResponse.ts#L22-L24 type Response []types.TemplatesRecord diff --git a/typedapi/cat/templates/templates.go b/typedapi/cat/templates/templates.go old mode 100755 new mode 100644 index 74b096088f..63de6f71cf --- a/typedapi/cat/templates/templates.go +++ b/typedapi/cat/templates/templates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about existing templates. package templates @@ -175,7 +175,6 @@ func (r Templates) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/threadpool/response.go b/typedapi/cat/threadpool/response.go old mode 100755 new mode 100644 index 32c480daa9..aa77557312 --- a/typedapi/cat/threadpool/response.go +++ b/typedapi/cat/threadpool/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package threadpool @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package threadpool // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/thread_pool/CatThreadPoolResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/thread_pool/CatThreadPoolResponse.ts#L22-L24 type Response []types.ThreadPoolRecord diff --git a/typedapi/cat/threadpool/thread_pool.go b/typedapi/cat/threadpool/thread_pool.go old mode 100755 new mode 100644 index 6d43bf84df..1707752ce6 --- a/typedapi/cat/threadpool/thread_pool.go +++ b/typedapi/cat/threadpool/thread_pool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns cluster-wide thread pool statistics per node. // By default the active, queue and rejected statistics are returned for all @@ -181,7 +181,6 @@ func (r ThreadPool) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cat/transforms/response.go b/typedapi/cat/transforms/response.go old mode 100755 new mode 100644 index 381a583da2..e2967d089b --- a/typedapi/cat/transforms/response.go +++ b/typedapi/cat/transforms/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package transforms @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package transforms // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/transforms/CatTransformsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/transforms/CatTransformsResponse.ts#L22-L24 type Response []types.TransformsRecord diff --git a/typedapi/cat/transforms/transforms.go b/typedapi/cat/transforms/transforms.go old mode 100755 new mode 100644 index c367fa0347..1add4d0e2a --- a/typedapi/cat/transforms/transforms.go +++ b/typedapi/cat/transforms/transforms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets configuration and usage information about transforms. 
package transforms @@ -178,7 +178,6 @@ func (r Transforms) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go b/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go old mode 100755 new mode 100644 index 43224c0d80..61a5bd4909 --- a/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go +++ b/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes auto-follow patterns. package deleteautofollowpattern @@ -170,7 +170,6 @@ func (r DeleteAutoFollowPattern) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/deleteautofollowpattern/response.go b/typedapi/ccr/deleteautofollowpattern/response.go old mode 100755 new mode 100644 index 87d13b2dfc..2d1d78774b --- a/typedapi/ccr/deleteautofollowpattern/response.go +++ b/typedapi/ccr/deleteautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteautofollowpattern // Response holds the response body struct for the package deleteautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/delete_auto_follow_pattern/DeleteAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/delete_auto_follow_pattern/DeleteAutoFollowPatternResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/follow/follow.go b/typedapi/ccr/follow/follow.go old mode 100755 new mode 100644 index c4012bc245..977f90977a --- a/typedapi/ccr/follow/follow.go +++ b/typedapi/ccr/follow/follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new follower index configured to follow the referenced leader // index. @@ -207,7 +207,6 @@ func (r Follow) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/follow/request.go b/typedapi/ccr/follow/request.go old mode 100755 new mode 100644 index bbda6ea1fe..abfc77435b --- a/typedapi/ccr/follow/request.go +++ b/typedapi/ccr/follow/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package follow @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package follow // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow/CreateFollowIndexRequest.ts#L25-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow/CreateFollowIndexRequest.ts#L25-L52 type Request struct { LeaderIndex *string `json:"leader_index,omitempty"` MaxOutstandingReadRequests *int64 `json:"max_outstanding_read_requests,omitempty"` diff --git a/typedapi/ccr/follow/response.go b/typedapi/ccr/follow/response.go old mode 100755 new mode 100644 index 54d9366291..698e09a48f --- a/typedapi/ccr/follow/response.go +++ b/typedapi/ccr/follow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package follow // Response holds the response body struct for the package follow // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow/CreateFollowIndexResponse.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow/CreateFollowIndexResponse.ts#L20-L26 type Response struct { FollowIndexCreated bool `json:"follow_index_created"` diff --git a/typedapi/ccr/followinfo/follow_info.go b/typedapi/ccr/followinfo/follow_info.go old mode 100755 new mode 100644 index b23574bcbc..8db0e9053d --- a/typedapi/ccr/followinfo/follow_info.go +++ b/typedapi/ccr/followinfo/follow_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about all follower indices, including parameters and // status for each follower index @@ -172,7 +172,6 @@ func (r FollowInfo) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/followinfo/response.go b/typedapi/ccr/followinfo/response.go old mode 100755 new mode 100644 index f6313b1e44..5c88dae294 --- a/typedapi/ccr/followinfo/response.go +++ b/typedapi/ccr/followinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package followinfo @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package followinfo // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow_info/FollowInfoResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow_info/FollowInfoResponse.ts#L22-L24 type Response struct { FollowerIndices []types.FollowerIndex `json:"follower_indices"` diff --git a/typedapi/ccr/followstats/follow_stats.go b/typedapi/ccr/followstats/follow_stats.go old mode 100755 new mode 100644 index 6403299c50..e7384f29d4 --- a/typedapi/ccr/followstats/follow_stats.go +++ b/typedapi/ccr/followstats/follow_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves follower stats. return shard-level stats about the following tasks // associated with each shard for the specified indices. @@ -172,7 +172,6 @@ func (r FollowStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/followstats/response.go b/typedapi/ccr/followstats/response.go old mode 100755 new mode 100644 index 049a038916..4c7c5f4f2a --- a/typedapi/ccr/followstats/response.go +++ b/typedapi/ccr/followstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package followstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package followstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow_stats/FollowIndexStatsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow_stats/FollowIndexStatsResponse.ts#L22-L24 type Response struct { Indices []types.FollowIndexStats `json:"indices"` diff --git a/typedapi/ccr/forgetfollower/forget_follower.go b/typedapi/ccr/forgetfollower/forget_follower.go old mode 100755 new mode 100644 index ce852ad7ed..4f947bc084 --- a/typedapi/ccr/forgetfollower/forget_follower.go +++ b/typedapi/ccr/forgetfollower/forget_follower.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes the follower retention leases from the leader. 
package forgetfollower @@ -205,7 +205,6 @@ func (r ForgetFollower) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/forgetfollower/request.go b/typedapi/ccr/forgetfollower/request.go old mode 100755 new mode 100644 index bb3fd88ffe..0299596709 --- a/typedapi/ccr/forgetfollower/request.go +++ b/typedapi/ccr/forgetfollower/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package forgetfollower @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package forgetfollower // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/forget_follower/ForgetFollowerIndexRequest.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/forget_follower/ForgetFollowerIndexRequest.ts#L23-L39 type Request struct { FollowerCluster *string `json:"follower_cluster,omitempty"` FollowerIndex *string `json:"follower_index,omitempty"` diff --git a/typedapi/ccr/forgetfollower/response.go b/typedapi/ccr/forgetfollower/response.go old mode 100755 new mode 100644 index 295d48c351..455db2dfa0 --- a/typedapi/ccr/forgetfollower/response.go +++ b/typedapi/ccr/forgetfollower/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package forgetfollower @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package forgetfollower // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/forget_follower/ForgetFollowerIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/forget_follower/ForgetFollowerIndexResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go b/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go old mode 100755 new mode 100644 index b18bec0077..a1ed580332 --- a/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go +++ b/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets configured auto-follow patterns. Returns the specified auto-follow // pattern collection. 
@@ -177,7 +177,6 @@ func (r GetAutoFollowPattern) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/getautofollowpattern/response.go b/typedapi/ccr/getautofollowpattern/response.go old mode 100755 new mode 100644 index 373bf24ff1..0bcfd9f306 --- a/typedapi/ccr/getautofollowpattern/response.go +++ b/typedapi/ccr/getautofollowpattern/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getautofollowpattern @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/get_auto_follow_pattern/GetAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/get_auto_follow_pattern/GetAutoFollowPatternResponse.ts#L22-L24 type Response struct { Patterns []types.AutoFollowPattern `json:"patterns"` diff --git a/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go b/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go old mode 100755 new mode 100644 index b1538ff1ba..cd9db3c9ac --- a/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go +++ b/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Pauses an auto-follow pattern package pauseautofollowpattern @@ -172,7 +172,6 @@ func (r PauseAutoFollowPattern) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/pauseautofollowpattern/response.go b/typedapi/ccr/pauseautofollowpattern/response.go old mode 100755 new mode 100644 index 6732a89f3a..3d57a07afc --- a/typedapi/ccr/pauseautofollowpattern/response.go +++ b/typedapi/ccr/pauseautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package pauseautofollowpattern // Response holds the response body struct for the package pauseautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/pause_auto_follow_pattern/PauseAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/pause_auto_follow_pattern/PauseAutoFollowPatternResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/pausefollow/pause_follow.go b/typedapi/ccr/pausefollow/pause_follow.go old mode 100755 new mode 100644 index 0430990a97..f687162423 --- a/typedapi/ccr/pausefollow/pause_follow.go +++ b/typedapi/ccr/pausefollow/pause_follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Pauses a follower index. The follower index will not fetch any additional // operations from the leader index. @@ -172,7 +172,6 @@ func (r PauseFollow) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/pausefollow/response.go b/typedapi/ccr/pausefollow/response.go old mode 100755 new mode 100644 index ec71ea5183..40410a6874 --- a/typedapi/ccr/pausefollow/response.go +++ b/typedapi/ccr/pausefollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package pausefollow // Response holds the response body struct for the package pausefollow // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/pause_follow/PauseFollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/pause_follow/PauseFollowIndexResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go b/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go old mode 100755 new mode 100644 index 979c3084d8..f4573c9220 --- a/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go +++ b/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new named collection of auto-follow patterns against a specified // remote cluster. 
Newly created indices on the remote cluster matching any of @@ -209,7 +209,6 @@ func (r PutAutoFollowPattern) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/putautofollowpattern/request.go b/typedapi/ccr/putautofollowpattern/request.go old mode 100755 new mode 100644 index e0efaf5bc7..e019ebf394 --- a/typedapi/ccr/putautofollowpattern/request.go +++ b/typedapi/ccr/putautofollowpattern/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putautofollowpattern @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternRequest.ts#L27-L113 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternRequest.ts#L27-L113 type Request struct { // FollowIndexPattern The name of follower index. The template {{leader_index}} can be used to diff --git a/typedapi/ccr/putautofollowpattern/response.go b/typedapi/ccr/putautofollowpattern/response.go old mode 100755 new mode 100644 index 1ce0cbc10a..3ca62dde81 --- a/typedapi/ccr/putautofollowpattern/response.go +++ b/typedapi/ccr/putautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putautofollowpattern // Response holds the response body struct for the package putautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/resumeautofollowpattern/response.go b/typedapi/ccr/resumeautofollowpattern/response.go old mode 100755 new mode 100644 index 2b6d1e791f..d292181758 --- a/typedapi/ccr/resumeautofollowpattern/response.go +++ b/typedapi/ccr/resumeautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resumeautofollowpattern // Response holds the response body struct for the package resumeautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/resume_auto_follow_pattern/ResumeAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/resume_auto_follow_pattern/ResumeAutoFollowPatternResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go b/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go old mode 100755 new mode 100644 index 77657708dd..bb91c87314 --- a/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go +++ b/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Resumes an auto-follow pattern that has been paused package resumeautofollowpattern @@ -172,7 +172,6 @@ func (r ResumeAutoFollowPattern) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/resumefollow/request.go b/typedapi/ccr/resumefollow/request.go old mode 100755 new mode 100644 index f26c2ab2b9..6bed773458 --- a/typedapi/ccr/resumefollow/request.go +++ b/typedapi/ccr/resumefollow/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resumefollow @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package resumefollow // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/resume_follow/ResumeFollowIndexRequest.ts#L25-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/resume_follow/ResumeFollowIndexRequest.ts#L25-L47 type Request struct { MaxOutstandingReadRequests *int64 `json:"max_outstanding_read_requests,omitempty"` MaxOutstandingWriteRequests *int64 `json:"max_outstanding_write_requests,omitempty"` diff --git a/typedapi/ccr/resumefollow/response.go b/typedapi/ccr/resumefollow/response.go old mode 100755 new mode 100644 index f86b67c978..07c47db2be --- a/typedapi/ccr/resumefollow/response.go +++ b/typedapi/ccr/resumefollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resumefollow // Response holds the response body struct for the package resumefollow // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/resume_follow/ResumeFollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/resume_follow/ResumeFollowIndexResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/resumefollow/resume_follow.go b/typedapi/ccr/resumefollow/resume_follow.go old mode 100755 new mode 100644 index 224b0e4f84..539a8af118 --- a/typedapi/ccr/resumefollow/resume_follow.go +++ b/typedapi/ccr/resumefollow/resume_follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Resumes a follower index that has been paused package resumefollow @@ -205,7 +205,6 @@ func (r ResumeFollow) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/stats/response.go b/typedapi/ccr/stats/response.go old mode 100755 new mode 100644 index b5e90010f4..8d717fa54a --- a/typedapi/ccr/stats/response.go +++ b/typedapi/ccr/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/stats/CcrStatsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/stats/CcrStatsResponse.ts#L22-L27 type Response struct { AutoFollowStats types.AutoFollowStats `json:"auto_follow_stats"` diff --git a/typedapi/ccr/stats/stats.go b/typedapi/ccr/stats/stats.go old mode 100755 new mode 100644 index d51e5f5cc9..b67ad5ec6e --- a/typedapi/ccr/stats/stats.go +++ b/typedapi/ccr/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets all stats related to cross-cluster replication. package stats @@ -159,7 +159,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ccr/unfollow/response.go b/typedapi/ccr/unfollow/response.go old mode 100755 new mode 100644 index 4942db75ad..1563aa1fa8 --- a/typedapi/ccr/unfollow/response.go +++ b/typedapi/ccr/unfollow/response.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package unfollow // Response holds the response body struct for the package unfollow // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/unfollow/UnfollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/unfollow/UnfollowIndexResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ccr/unfollow/unfollow.go b/typedapi/ccr/unfollow/unfollow.go old mode 100755 new mode 100644 index 8b07ba24a3..34a1a53a99 --- a/typedapi/ccr/unfollow/unfollow.go +++ b/typedapi/ccr/unfollow/unfollow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Stops the following task associated with a follower index and removes index // metadata and settings associated with cross-cluster replication. @@ -172,7 +172,6 @@ func (r Unfollow) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/allocationexplain/allocation_explain.go b/typedapi/cluster/allocationexplain/allocation_explain.go old mode 100755 new mode 100644 index fb4c9cda61..ed83d5c027 --- a/typedapi/cluster/allocationexplain/allocation_explain.go +++ b/typedapi/cluster/allocationexplain/allocation_explain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides explanations for shard allocations in the cluster. package allocationexplain @@ -197,7 +197,6 @@ func (r AllocationExplain) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/allocationexplain/request.go b/typedapi/cluster/allocationexplain/request.go old mode 100755 new mode 100644 index f485ad1ab5..44e606b90d --- a/typedapi/cluster/allocationexplain/request.go +++ b/typedapi/cluster/allocationexplain/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package allocationexplain @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package allocationexplain // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/ClusterAllocationExplainRequest.ts#L24-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/ClusterAllocationExplainRequest.ts#L24-L61 type Request struct { // CurrentNode Specifies the node ID or the name of the node to only explain a shard that is diff --git a/typedapi/cluster/allocationexplain/response.go b/typedapi/cluster/allocationexplain/response.go old mode 100755 new mode 100644 index d235cd331e..c90ca37015 --- a/typedapi/cluster/allocationexplain/response.go +++ b/typedapi/cluster/allocationexplain/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package allocationexplain @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package allocationexplain // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/ClusterAllocationExplainResponse.ts#L32-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/ClusterAllocationExplainResponse.ts#L32-L61 type Response struct { AllocateExplanation *string `json:"allocate_explanation,omitempty"` diff --git a/typedapi/cluster/deletecomponenttemplate/delete_component_template.go b/typedapi/cluster/deletecomponenttemplate/delete_component_template.go old mode 100755 new mode 100644 index 659314619f..4c27212c75 --- a/typedapi/cluster/deletecomponenttemplate/delete_component_template.go +++ b/typedapi/cluster/deletecomponenttemplate/delete_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a component template package deletecomponenttemplate @@ -168,7 +168,6 @@ func (r DeleteComponentTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/deletecomponenttemplate/response.go b/typedapi/cluster/deletecomponenttemplate/response.go old mode 100755 new mode 100644 index 999cabeef7..dbf664eb0f --- a/typedapi/cluster/deletecomponenttemplate/response.go +++ b/typedapi/cluster/deletecomponenttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletecomponenttemplate // Response holds the response body struct for the package deletecomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/delete_component_template/ClusterDeleteComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/delete_component_template/ClusterDeleteComponentTemplateResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go b/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go old mode 100755 new mode 100644 index c1e28be914..481f1ecf42 --- a/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go +++ b/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clears cluster voting config exclusions. package deletevotingconfigexclusions @@ -24,7 +24,6 @@ package deletevotingconfigexclusions import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -142,36 +140,6 @@ func (r DeleteVotingConfigExclusions) Perform(ctx context.Context) (*http.Respon return res, nil } -// Do runs the request through the transport, handle the response and returns a deletevotingconfigexclusions.Response -func (r DeleteVotingConfigExclusions) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r DeleteVotingConfigExclusions) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/cluster/existscomponenttemplate/exists_component_template.go b/typedapi/cluster/existscomponenttemplate/exists_component_template.go old mode 100755 new mode 100644 index 1ca783c963..95f718f000 --- a/typedapi/cluster/existscomponenttemplate/exists_component_template.go +++ b/typedapi/cluster/existscomponenttemplate/exists_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a particular component template exist package existscomponenttemplate @@ -24,7 +24,6 @@ package existscomponenttemplate import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) const ( @@ -151,36 +149,6 @@ func (r ExistsComponentTemplate) Perform(ctx context.Context) (*http.Response, e return res, nil } -// Do runs the request through the transport, handle the response and returns a existscomponenttemplate.Response -func (r ExistsComponentTemplate) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r ExistsComponentTemplate) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/cluster/getcomponenttemplate/get_component_template.go b/typedapi/cluster/getcomponenttemplate/get_component_template.go old mode 100755 new mode 100644 index a44386af52..c156661e68 --- a/typedapi/cluster/getcomponenttemplate/get_component_template.go +++ b/typedapi/cluster/getcomponenttemplate/get_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns one or more component templates package getcomponenttemplate @@ -172,7 +172,6 @@ func (r GetComponentTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/getcomponenttemplate/response.go b/typedapi/cluster/getcomponenttemplate/response.go old mode 100755 new mode 100644 index 9b3148ed86..26297923f9 --- a/typedapi/cluster/getcomponenttemplate/response.go +++ b/typedapi/cluster/getcomponenttemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
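The delete_voting_config_exclusions and exists_component_template hunks above remove the generated Do() helper from endpoints that never return a JSON body; Perform() and IsSuccess() remain. A minimal sketch of the remaining control flow, assuming a typed client from elasticsearch.NewTypedClient and that the ExistsComponentTemplate builder takes the component template name as its only argument (neither is shown in this diff):

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Assumed setup: a typed client against a locally reachable cluster.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// HEAD-style endpoints no longer expose Do(); IsSuccess maps a 2xx reply to true.
	found, err := es.Cluster.ExistsComponentTemplate("my-template").IsSuccess(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("component template present: %v", found)
}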
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getcomponenttemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/get_component_template/ClusterGetComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/get_component_template/ClusterGetComponentTemplateResponse.ts#L22-L24 type Response struct { ComponentTemplates []types.ClusterComponentTemplate `json:"component_templates"` diff --git a/typedapi/cluster/getsettings/get_settings.go b/typedapi/cluster/getsettings/get_settings.go old mode 100755 new mode 100644 index 44350fb5c3..58d91cdacf --- a/typedapi/cluster/getsettings/get_settings.go +++ b/typedapi/cluster/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns cluster settings. package getsettings @@ -160,7 +160,6 @@ func (r GetSettings) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/getsettings/response.go b/typedapi/cluster/getsettings/response.go old mode 100755 new mode 100644 index b48d3e8f81..b3ecfc2cd0 --- a/typedapi/cluster/getsettings/response.go +++ b/typedapi/cluster/getsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getsettings @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package getsettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/get_settings/ClusterGetSettingsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/get_settings/ClusterGetSettingsResponse.ts#L23-L29 type Response struct { Defaults map[string]json.RawMessage `json:"defaults,omitempty"` diff --git a/typedapi/cluster/health/health.go b/typedapi/cluster/health/health.go old mode 100755 new mode 100644 index f00c80fab0..fe19738942 --- a/typedapi/cluster/health/health.go +++ b/typedapi/cluster/health/health.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns basic information about the health of the cluster. 
package health @@ -173,14 +173,13 @@ func (r Health) Do(ctx context.Context) (*Response, error) { } defer res.Body.Close() - if res.StatusCode < 299 { + if res.StatusCode < 299 || res.StatusCode == 408 { err = json.NewDecoder(res.Body).Decode(response) if err != nil { return nil, err } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/health/response.go b/typedapi/cluster/health/response.go old mode 100755 new mode 100644 index 11cba2fe42..cab6980765 --- a/typedapi/cluster/health/response.go +++ b/typedapi/cluster/health/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package health @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package health // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/health/ClusterHealthResponse.ts#L26-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/health/ClusterHealthResponse.ts#L26-L37 type Response struct { diff --git a/typedapi/cluster/pendingtasks/pending_tasks.go b/typedapi/cluster/pendingtasks/pending_tasks.go old mode 100755 new mode 100644 index 3c5c476ba5..cb0fd018a8 --- a/typedapi/cluster/pendingtasks/pending_tasks.go +++ b/typedapi/cluster/pendingtasks/pending_tasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a list of any cluster-level changes (e.g. create index, update // mapping, @@ -164,7 +164,6 @@ func (r PendingTasks) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/pendingtasks/response.go b/typedapi/cluster/pendingtasks/response.go old mode 100755 new mode 100644 index efb4154773..10ca9225d5 --- a/typedapi/cluster/pendingtasks/response.go +++ b/typedapi/cluster/pendingtasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
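With the cluster health change above, Do() also decodes an HTTP 408 body, so a wait_for_* request that times out still yields a typed Response rather than an error. A minimal sketch, assuming a typed client and that the generated Response carries the usual timed_out and status fields (neither appears in this hunk):

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// When the request carries wait_for_* parameters and the wait expires, the
	// server answers 408; Do now decodes that body instead of returning an error.
	res, err := es.Cluster.Health().Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	if res.TimedOut {
		log.Println("health wait timed out before the requested status was reached")
	}
	log.Println("cluster status:", res.Status)
}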
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package pendingtasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package pendingtasks // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/pending_tasks/ClusterPendingTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/pending_tasks/ClusterPendingTasksResponse.ts#L22-L24 type Response struct { Tasks []types.PendingTask `json:"tasks"` diff --git a/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go b/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go old mode 100755 new mode 100644 index 47f86dff50..4c34a5f009 --- a/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go +++ b/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the cluster voting config exclusions by node ids or node names. package postvotingconfigexclusions @@ -24,7 +24,6 @@ package postvotingconfigexclusions import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -34,7 +33,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -141,36 +139,6 @@ func (r PostVotingConfigExclusions) Perform(ctx context.Context) (*http.Response return res, nil } -// Do runs the request through the transport, handle the response and returns a postvotingconfigexclusions.Response -func (r PostVotingConfigExclusions) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r PostVotingConfigExclusions) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/cluster/putcomponenttemplate/put_component_template.go b/typedapi/cluster/putcomponenttemplate/put_component_template.go old mode 100755 new mode 100644 index 2ccffc015c..f71908ac4c --- a/typedapi/cluster/putcomponenttemplate/put_component_template.go +++ b/typedapi/cluster/putcomponenttemplate/put_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates a component template package putcomponenttemplate @@ -204,7 +204,6 @@ func (r PutComponentTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/putcomponenttemplate/request.go b/typedapi/cluster/putcomponenttemplate/request.go old mode 100755 new mode 100644 index 80ba313398..f72abae835 --- a/typedapi/cluster/putcomponenttemplate/request.go +++ b/typedapi/cluster/putcomponenttemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putcomponenttemplate @@ -29,21 +29,33 @@ import ( // Request holds the request body struct for the package putcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/put_component_template/ClusterPutComponentTemplateRequest.ts#L29-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/put_component_template/ClusterPutComponentTemplateRequest.ts#L29-L69 type Request struct { - Aliases map[string]types.AliasDefinition `json:"aliases,omitempty"` - Mappings *types.TypeMapping `json:"mappings,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` - Settings *types.IndexSettings `json:"settings,omitempty"` - Template types.IndexState `json:"template"` - Version *int64 `json:"version,omitempty"` + + // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster + // setting. + // If set to `true` in a template, then indices can be automatically created + // using that + // template even if auto-creation of indices is disabled via + // `actions.auto_create_index`. + // If set to `false` then data streams matching the template must always be + // explicitly created. + AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` + // Meta_ Optional user metadata about the component template. + // May have any contents. This map is not automatically generated by + // Elasticsearch. + Meta_ types.Metadata `json:"_meta,omitempty"` + // Template The template to be applied which includes mappings, settings, or aliases + // configuration. + Template types.IndexState `json:"template"` + // Version Version number used to manage component templates externally. + // This number isn't automatically generated or incremented by Elasticsearch. + Version *int64 `json:"version,omitempty"` } // NewRequest returns a Request func NewRequest() *Request { - r := &Request{ - Aliases: make(map[string]types.AliasDefinition, 0), - } + r := &Request{} return r } diff --git a/typedapi/cluster/putcomponenttemplate/response.go b/typedapi/cluster/putcomponenttemplate/response.go old mode 100755 new mode 100644 index 0850fce57e..bda712ed0e --- a/typedapi/cluster/putcomponenttemplate/response.go +++ b/typedapi/cluster/putcomponenttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
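The put_component_template request hunk above replaces the top-level Aliases/Mappings/Settings fields with a single Template (types.IndexState) and adds AllowAutoCreate, Meta_ and Version. A minimal sketch of the new body shape, using only the fields shown in the hunk; the concrete values are illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/cluster/putcomponenttemplate"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	allowAutoCreate := true
	version := int64(3)

	req := putcomponenttemplate.NewRequest()
	req.AllowAutoCreate = &allowAutoCreate
	req.Version = &version
	// Mappings, settings and aliases are no longer top-level request fields;
	// they travel inside Template (types.IndexState).
	req.Template = types.IndexState{}

	body, err := json.MarshalIndent(req, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}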
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putcomponenttemplate // Response holds the response body struct for the package putcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/put_component_template/ClusterPutComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/put_component_template/ClusterPutComponentTemplateResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/cluster/putsettings/put_settings.go b/typedapi/cluster/putsettings/put_settings.go old mode 100755 new mode 100644 index 9bea87ffc6..3ab62cc09a --- a/typedapi/cluster/putsettings/put_settings.go +++ b/typedapi/cluster/putsettings/put_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the cluster settings. package putsettings @@ -195,7 +195,6 @@ func (r PutSettings) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/putsettings/request.go b/typedapi/cluster/putsettings/request.go old mode 100755 new mode 100644 index 8414f6ee3f..10cef07cf3 --- a/typedapi/cluster/putsettings/request.go +++ b/typedapi/cluster/putsettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putsettings @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/put_settings/ClusterPutSettingsRequest.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/put_settings/ClusterPutSettingsRequest.ts#L25-L43 type Request struct { Persistent map[string]json.RawMessage `json:"persistent,omitempty"` Transient map[string]json.RawMessage `json:"transient,omitempty"` diff --git a/typedapi/cluster/putsettings/response.go b/typedapi/cluster/putsettings/response.go old mode 100755 new mode 100644 index 96d5a0d8e5..dea42ce890 --- a/typedapi/cluster/putsettings/response.go +++ b/typedapi/cluster/putsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putsettings @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/put_settings/ClusterPutSettingsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/put_settings/ClusterPutSettingsResponse.ts#L23-L29 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/cluster/remoteinfo/remote_info.go b/typedapi/cluster/remoteinfo/remote_info.go old mode 100755 new mode 100644 index fa261d08fa..f44860ca21 --- a/typedapi/cluster/remoteinfo/remote_info.go +++ b/typedapi/cluster/remoteinfo/remote_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the information about configured remote clusters. package remoteinfo @@ -159,7 +159,6 @@ func (r RemoteInfo) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/remoteinfo/response.go b/typedapi/cluster/remoteinfo/response.go old mode 100755 new mode 100644 index 1865b26ac5..c14dd41b0d --- a/typedapi/cluster/remoteinfo/response.go +++ b/typedapi/cluster/remoteinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package remoteinfo @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package remoteinfo // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L24-L26 type Response map[string]types.ClusterRemoteInfo diff --git a/typedapi/cluster/reroute/request.go b/typedapi/cluster/reroute/request.go old mode 100755 new mode 100644 index b6c1bb3b8a..45bd11d0c7 --- a/typedapi/cluster/reroute/request.go +++ b/typedapi/cluster/reroute/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reroute @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package reroute // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/ClusterRerouteRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/ClusterRerouteRequest.ts#L25-L70 type Request struct { // Commands Defines the commands to perform. diff --git a/typedapi/cluster/reroute/reroute.go b/typedapi/cluster/reroute/reroute.go old mode 100755 new mode 100644 index 12499e4db6..659f672cf8 --- a/typedapi/cluster/reroute/reroute.go +++ b/typedapi/cluster/reroute/reroute.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to manually change the allocation of individual shards in the cluster. package reroute @@ -195,7 +195,6 @@ func (r Reroute) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/reroute/response.go b/typedapi/cluster/reroute/response.go old mode 100755 new mode 100644 index b3f71706be..66ecf54341 --- a/typedapi/cluster/reroute/response.go +++ b/typedapi/cluster/reroute/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reroute @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package reroute // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/ClusterRerouteResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/ClusterRerouteResponse.ts#L23-L34 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/cluster/state/response.go b/typedapi/cluster/state/response.go old mode 100755 new mode 100644 index 69fe47a098..053fd132d8 --- a/typedapi/cluster/state/response.go +++ b/typedapi/cluster/state/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package state @@ -24,6 +24,10 @@ import "encoding/json" // Response holds the response body struct for the package state // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/state/ClusterStateResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/state/ClusterStateResponse.ts#L22-L29 -type Response json.RawMessage +type Response = json.RawMessage + +func NewResponse() *Response { + return new(Response) +} diff --git a/typedapi/cluster/state/state.go b/typedapi/cluster/state/state.go old mode 100755 new mode 100644 index 95b3769af3..1738ba09df --- a/typedapi/cluster/state/state.go +++ b/typedapi/cluster/state/state.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a comprehensive information about the state of the cluster. package state @@ -192,7 +192,6 @@ func (r State) Do(ctx context.Context) (Response, error) { } return *response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/cluster/stats/response.go b/typedapi/cluster/stats/response.go old mode 100755 new mode 100644 index 29fc2df040..e1b3fca78f --- a/typedapi/cluster/stats/response.go +++ b/typedapi/cluster/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stats @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/ClusterStatsResponse.ts#L55-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/ClusterStatsResponse.ts#L55-L57 type Response struct { diff --git a/typedapi/cluster/stats/stats.go b/typedapi/cluster/stats/stats.go old mode 100755 new mode 100644 index b71fd06ad6..da241de5c1 --- a/typedapi/cluster/stats/stats.go +++ b/typedapi/cluster/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns high-level overview of cluster statistics. 
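Since the cluster state Response above is now an alias for json.RawMessage, State().Do() hands back the raw cluster state and callers decode only what they need. A minimal sketch, assuming a typed client and the standard top-level keys of the _cluster/state payload (cluster_name, master_node):

package main

import (
	"context"
	"encoding/json"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// Response is json.RawMessage, so Do returns the raw cluster state.
	raw, err := es.Cluster.State().Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}

	// Pick out only the top-level keys this caller cares about.
	var state struct {
		ClusterName string `json:"cluster_name"`
		MasterNode  string `json:"master_node"`
	}
	if err := json.Unmarshal(raw, &state); err != nil {
		log.Fatal(err)
	}
	log.Printf("cluster %q, elected master node %s", state.ClusterName, state.MasterNode)
}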
package stats @@ -178,7 +178,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/clearscroll/clear_scroll.go b/typedapi/core/clearscroll/clear_scroll.go old mode 100755 new mode 100644 index 0661a51ec7..2d49067e05 --- a/typedapi/core/clearscroll/clear_scroll.go +++ b/typedapi/core/clearscroll/clear_scroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Explicitly clears the search context for a scroll. package clearscroll @@ -203,14 +203,13 @@ func (r ClearScroll) Do(ctx context.Context) (*Response, error) { } defer res.Body.Close() - if res.StatusCode < 299 { + if res.StatusCode < 299 || res.StatusCode == 404 { err = json.NewDecoder(res.Body).Decode(response) if err != nil { return nil, err } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/clearscroll/request.go b/typedapi/core/clearscroll/request.go old mode 100755 new mode 100644 index 68dc915d09..04e3504de3 --- a/typedapi/core/clearscroll/request.go +++ b/typedapi/core/clearscroll/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearscroll @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clearscroll // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/clear_scroll/ClearScrollRequest.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/clear_scroll/ClearScrollRequest.ts#L23-L36 type Request struct { ScrollId []string `json:"scroll_id,omitempty"` } diff --git a/typedapi/core/clearscroll/response.go b/typedapi/core/clearscroll/response.go old mode 100755 new mode 100644 index 6ed6200e80..4595e6808a --- a/typedapi/core/clearscroll/response.go +++ b/typedapi/core/clearscroll/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearscroll // Response holds the response body struct for the package clearscroll // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/clear_scroll/ClearScrollResponse.ts#L22-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/clear_scroll/ClearScrollResponse.ts#L22-L36 type Response struct { NumFreed int `json:"num_freed"` diff --git a/typedapi/core/closepointintime/close_point_in_time.go b/typedapi/core/closepointintime/close_point_in_time.go old mode 100755 new mode 100644 index a78650609a..c4ce630bb7 --- a/typedapi/core/closepointintime/close_point_in_time.go +++ b/typedapi/core/closepointintime/close_point_in_time.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Close a point in time package closepointintime @@ -185,14 +185,13 @@ func (r ClosePointInTime) Do(ctx context.Context) (*Response, error) { } defer res.Body.Close() - if res.StatusCode < 299 { + if res.StatusCode < 299 || res.StatusCode == 404 { err = json.NewDecoder(res.Body).Decode(response) if err != nil { return nil, err } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/closepointintime/request.go b/typedapi/core/closepointintime/request.go old mode 100755 new mode 100644 index 13a2ddd55b..5609e0f1d6 --- a/typedapi/core/closepointintime/request.go +++ b/typedapi/core/closepointintime/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package closepointintime @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package closepointintime // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/close_point_in_time/ClosePointInTimeRequest.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/close_point_in_time/ClosePointInTimeRequest.ts#L23-L33 type Request struct { Id string `json:"id"` } diff --git a/typedapi/core/closepointintime/response.go b/typedapi/core/closepointintime/response.go old mode 100755 new mode 100644 index 5a3f774039..9f6cefc198 --- a/typedapi/core/closepointintime/response.go +++ b/typedapi/core/closepointintime/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
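The clear_scroll and close_point_in_time hunks above make Do() decode a 404 body as well, so releasing an already-expired scroll or point in time no longer surfaces as an Elasticsearch error. A minimal sketch for the scroll case, assuming a typed client and the builder's Request() setter; the scroll ID is a placeholder:

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/clearscroll"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{})
	if err != nil {
		log.Fatal(err)
	}

	scrollID := "<scroll id from a previous search response>"

	req := &clearscroll.Request{ScrollId: []string{scrollID}}
	res, err := es.ClearScroll().Request(req).Do(context.Background())
	if err != nil {
		log.Fatal(err) // transport failures and statuses other than 2xx/404
	}
	// A stale or already-cleared scroll now comes back as a decoded 404 body
	// rather than as an Elasticsearch error value.
	log.Printf("freed %d search context(s)", res.NumFreed)
}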
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package closepointintime // Response holds the response body struct for the package closepointintime // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/close_point_in_time/ClosePointInTimeResponse.ts#L22-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/close_point_in_time/ClosePointInTimeResponse.ts#L22-L36 type Response struct { NumFreed int `json:"num_freed"` diff --git a/typedapi/core/count/count.go b/typedapi/core/count/count.go old mode 100755 new mode 100644 index 00cac3edf4..89c8bf4a4e --- a/typedapi/core/count/count.go +++ b/typedapi/core/count/count.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns number of documents matching a query. package count @@ -209,7 +209,6 @@ func (r Count) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/count/request.go b/typedapi/core/count/request.go old mode 100755 new mode 100644 index 31f91209cb..f75427bb6f --- a/typedapi/core/count/request.go +++ b/typedapi/core/count/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package count @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/count/CountRequest.ts#L26-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/count/CountRequest.ts#L26-L54 type Request struct { Query *types.Query `json:"query,omitempty"` } diff --git a/typedapi/core/count/response.go b/typedapi/core/count/response.go old mode 100755 new mode 100644 index 6f70ac6eaf..324a7e9bc3 --- a/typedapi/core/count/response.go +++ b/typedapi/core/count/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package count @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/count/CountResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/count/CountResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/core/create/create.go b/typedapi/core/create/create.go old mode 100755 new mode 100644 index a4587297c6..1d41a8a9b9 --- a/typedapi/core/create/create.go +++ b/typedapi/core/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new document in the index. // @@ -220,7 +220,6 @@ func (r Create) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/create/response.go b/typedapi/core/create/response.go old mode 100755 new mode 100644 index 0fb29bc592..b6d32228cb --- a/typedapi/core/create/response.go +++ b/typedapi/core/create/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package create @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/create/CreateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/create/CreateResponse.ts#L22-L24 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` diff --git a/typedapi/core/delete/delete.go b/typedapi/core/delete/delete.go old mode 100755 new mode 100644 index 19a9efb560..87e9235d8a --- a/typedapi/core/delete/delete.go +++ b/typedapi/core/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes a document from the index. 
package delete @@ -172,14 +172,13 @@ func (r Delete) Do(ctx context.Context) (*Response, error) { } defer res.Body.Close() - if res.StatusCode < 299 { + if res.StatusCode < 299 || res.StatusCode == 404 { err = json.NewDecoder(res.Body).Decode(response) if err != nil { return nil, err } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/delete/response.go b/typedapi/core/delete/response.go old mode 100755 new mode 100644 index e69163ff4d..78aac0ebc7 --- a/typedapi/core/delete/response.go +++ b/typedapi/core/delete/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package delete @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/delete/DeleteResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/delete/DeleteResponse.ts#L22-L34 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` diff --git a/typedapi/core/deletebyquery/delete_by_query.go b/typedapi/core/deletebyquery/delete_by_query.go old mode 100755 new mode 100644 index ef7cd6bb9f..20e184855c --- a/typedapi/core/deletebyquery/delete_by_query.go +++ b/typedapi/core/deletebyquery/delete_by_query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes documents matching the provided query. package deletebyquery @@ -208,7 +208,6 @@ func (r DeleteByQuery) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/deletebyquery/request.go b/typedapi/core/deletebyquery/request.go old mode 100755 new mode 100644 index c005fd58c8..8e3919b93d --- a/typedapi/core/deletebyquery/request.go +++ b/typedapi/core/deletebyquery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
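With the delete hunk above, a 404 for a missing document is decoded into the typed Response instead of being returned as an error, so "already gone" can be handled as a normal outcome. A minimal sketch, assuming a typed client and that the Delete builder takes (index, id):

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// Deleting a document that may already be gone: a 404 now decodes into the
	// typed Response instead of surfacing as an error.
	res, err := es.Delete("my-index", "maybe-missing-id").Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("delete outcome: %+v", res)
}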
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletebyquery @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package deletebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/delete_by_query/DeleteByQueryRequest.ts#L36-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/delete_by_query/DeleteByQueryRequest.ts#L36-L81 type Request struct { MaxDocs *int64 `json:"max_docs,omitempty"` Query *types.Query `json:"query,omitempty"` diff --git a/typedapi/core/deletebyquery/response.go b/typedapi/core/deletebyquery/response.go old mode 100755 new mode 100644 index b97e36ca61..d5e7c3db29 --- a/typedapi/core/deletebyquery/response.go +++ b/typedapi/core/deletebyquery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletebyquery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deletebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/delete_by_query/DeleteByQueryResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/delete_by_query/DeleteByQueryResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` diff --git a/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go b/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go old mode 100755 new mode 100644 index 64745eb255..d7a181d279 --- a/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go +++ b/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Changes the number of requests per second for a particular Delete By Query // operation. @@ -172,7 +172,6 @@ func (r DeleteByQueryRethrottle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/deletebyqueryrethrottle/response.go b/typedapi/core/deletebyqueryrethrottle/response.go old mode 100755 new mode 100644 index bda473c671..d4de597d21 --- a/typedapi/core/deletebyqueryrethrottle/response.go +++ b/typedapi/core/deletebyqueryrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletebyqueryrethrottle @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package deletebyqueryrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/delete_by_query_rethrottle/DeleteByQueryRethrottleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/delete_by_query_rethrottle/DeleteByQueryRethrottleResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` @@ -73,6 +73,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "nodes": + if s.Nodes == nil { + s.Nodes = make(map[string]types.NodeTasks, 0) + } if err := dec.Decode(&s.Nodes); err != nil { return err } @@ -83,8 +86,24 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "tasks": - if err := dec.Decode(&s.Tasks); err != nil { - return err + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]types.ParentTaskInfo, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Tasks = o + case '[': + o := []types.TaskInfo{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Tasks = o } } diff --git a/typedapi/core/deletescript/delete_script.go b/typedapi/core/deletescript/delete_script.go old mode 100755 new mode 100644 index 0c6a0173cb..11f73c5c83 --- a/typedapi/core/deletescript/delete_script.go +++ b/typedapi/core/deletescript/delete_script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a script. package deletescript @@ -168,7 +168,6 @@ func (r DeleteScript) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/deletescript/response.go b/typedapi/core/deletescript/response.go old mode 100755 new mode 100644 index 925324b06b..9e410744f7 --- a/typedapi/core/deletescript/response.go +++ b/typedapi/core/deletescript/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
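The UnmarshalJSON change above decodes the tasks field either as an object keyed by parent task (map[string]types.ParentTaskInfo) or as a flat list ([]types.TaskInfo), depending on the JSON shape. A hedged sketch of how a caller might branch on the result, assuming the Tasks field is declared with an interface (union) type as the decoder implies; the package name is illustrative:

package rethrottleexample

import (
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/deletebyqueryrethrottle"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// logTasks inspects the tasks union after a rethrottle call; res would come
// from a DeleteByQueryRethrottle(...).Do(ctx) call.
func logTasks(res *deletebyqueryrethrottle.Response) {
	switch tasks := res.Tasks.(type) {
	case map[string]types.ParentTaskInfo: // object form: tasks grouped by parent
		log.Printf("%d parent task(s) affected", len(tasks))
	case []types.TaskInfo: // array form: flat task list
		log.Printf("%d task(s) affected", len(tasks))
	default:
		log.Println("no task details in the response")
	}
}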
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletescript // Response holds the response body struct for the package deletescript // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/delete_script/DeleteScriptResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/delete_script/DeleteScriptResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/core/exists/exists.go b/typedapi/core/exists/exists.go old mode 100755 new mode 100644 index e445b9419c..6f1420fac7 --- a/typedapi/core/exists/exists.go +++ b/typedapi/core/exists/exists.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a document exists in an index. package exists @@ -24,7 +24,6 @@ package exists import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" ) @@ -161,36 +159,6 @@ func (r Exists) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a exists.Response -func (r Exists) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r Exists) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/core/existssource/exists_source.go b/typedapi/core/existssource/exists_source.go old mode 100755 new mode 100644 index 406c78d2da..6ebe097b77 --- a/typedapi/core/existssource/exists_source.go +++ b/typedapi/core/existssource/exists_source.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a document source exists in an index. 
package existssource @@ -24,7 +24,6 @@ package existssource import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" ) @@ -161,36 +159,6 @@ func (r ExistsSource) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a existssource.Response -func (r ExistsSource) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r ExistsSource) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/core/existssource/response.go b/typedapi/core/existssource/response.go deleted file mode 100755 index 10200535c6..0000000000 --- a/typedapi/core/existssource/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package existssource - -// Response holds the response body struct for the package existssource -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/exists_source/SourceExistsResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/core/explain/explain.go b/typedapi/core/explain/explain.go old mode 100755 new mode 100644 index 899c185e3f..59d85e06e1 --- a/typedapi/core/explain/explain.go +++ b/typedapi/core/explain/explain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
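With their generated Do methods removed (and, for the _source variant, the empty Response type deleted), these existence checks are now driven through IsSuccess, or through Perform when the raw *http.Response is needed. The sketch below is hedged: the (index, id) signature of the generated constructor and the `tp` transport value are assumptions, since neither appears in this hunk.

package docexample

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/exists"
)

// documentExists reports whether a document is present. false with a nil
// error means the cluster answered with a non-2xx status (typically 404);
// a non-nil error means the request itself could not be performed.
func documentExists(ctx context.Context, tp elastictransport.Interface) (bool, error) {
	// Placeholders throughout: the index name, the document ID, and the
	// (index, id) argument order of the constructor are assumptions based
	// on the other document-level endpoints.
	return exists.NewExistsFunc(tp)("my-index", "my-document-id").IsSuccess(ctx)
}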
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about why a specific matches (or doesn't match) a query. package explain @@ -214,7 +214,6 @@ func (r Explain) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/explain/request.go b/typedapi/core/explain/request.go old mode 100755 new mode 100644 index cdf761d027..10b1b7ac61 --- a/typedapi/core/explain/request.go +++ b/typedapi/core/explain/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explain @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explain // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/explain/ExplainRequest.ts#L26-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/explain/ExplainRequest.ts#L26-L53 type Request struct { Query *types.Query `json:"query,omitempty"` } diff --git a/typedapi/core/explain/response.go b/typedapi/core/explain/response.go old mode 100755 new mode 100644 index 6c845c892d..9ba9018ef3 --- a/typedapi/core/explain/response.go +++ b/typedapi/core/explain/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explain @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explain // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/explain/ExplainResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/explain/ExplainResponse.ts#L23-L31 type Response struct { Explanation *types.ExplanationDetail `json:"explanation,omitempty"` diff --git a/typedapi/core/fieldcaps/field_caps.go b/typedapi/core/fieldcaps/field_caps.go old mode 100755 new mode 100644 index da59a7e477..8d511f73db --- a/typedapi/core/fieldcaps/field_caps.go +++ b/typedapi/core/fieldcaps/field_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the information about the capabilities of fields among multiple // indices. 
@@ -209,7 +209,6 @@ func (r FieldCaps) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/fieldcaps/request.go b/typedapi/core/fieldcaps/request.go old mode 100755 new mode 100644 index 3b7b860ce6..42a5838656 --- a/typedapi/core/fieldcaps/request.go +++ b/typedapi/core/fieldcaps/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package fieldcaps @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package fieldcaps // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/field_caps/FieldCapabilitiesRequest.ts#L25-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/field_caps/FieldCapabilitiesRequest.ts#L25-L95 type Request struct { // Fields List of fields to retrieve capabilities for. Wildcard (`*`) expressions are @@ -42,7 +42,7 @@ type Request struct { // search requests. // These fields exist only as part of the query and take precedence over fields // defined with the same name in the index mappings. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` } // NewRequest returns a Request diff --git a/typedapi/core/fieldcaps/response.go b/typedapi/core/fieldcaps/response.go old mode 100755 new mode 100644 index c278e795b6..7922732e89 --- a/typedapi/core/fieldcaps/response.go +++ b/typedapi/core/fieldcaps/response.go @@ -16,17 +16,22 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
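In the field_caps request body, the change is the type of `runtime_mappings`, which moves from a raw map to the `types.RuntimeFields` alias. A short caller-side sketch follows, under the assumption (not shown in this hunk) that the alias is still a map keyed by field name with `types.RuntimeField` values:

package docexample

import (
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/fieldcaps"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// newFieldCapsRequest builds a field_caps body with an ad-hoc runtime field.
// The runtime field definition itself is elided; see types.RuntimeField for
// the available members.
func newFieldCapsRequest() *fieldcaps.Request {
	req := fieldcaps.NewRequest()
	req.RuntimeMappings = types.RuntimeFields{
		"day_of_week": {}, // placeholder definition
	}
	return req
}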
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package fieldcaps import ( + "bytes" + "encoding/json" + "errors" + "io" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Response holds the response body struct for the package fieldcaps // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/field_caps/FieldCapabilitiesResponse.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/field_caps/FieldCapabilitiesResponse.ts#L24-L35 type Response struct { Fields map[string]map[string]types.FieldCapability `json:"fields"` @@ -40,3 +45,46 @@ func NewResponse() *Response { } return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]map[string]types.FieldCapability, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + } + } + return nil +} diff --git a/typedapi/core/get/get.go b/typedapi/core/get/get.go old mode 100755 new mode 100644 index 4b2bb4ee4d..d0006f69c7 --- a/typedapi/core/get/get.go +++ b/typedapi/core/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a document. package get @@ -179,7 +179,6 @@ func (r Get) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/get/response.go b/typedapi/core/get/response.go old mode 100755 new mode 100644 index 2292f4ca4a..e0885a9848 --- a/typedapi/core/get/response.go +++ b/typedapi/core/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
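The new UnmarshalJSON above normalises `indices`, which the server may send either as a single string or as an array, into the response's slice. A small round-trip against a hypothetical minimal body (empty `fields`, to keep the example focused on the indices handling):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/fieldcaps"
)

func main() {
	// "indices" is deliberately a bare string here to exercise the
	// string-or-array handling added above; the body is otherwise minimal.
	body := []byte(`{"indices":"my-index","fields":{}}`)

	resp := fieldcaps.NewResponse()
	if err := json.Unmarshal(body, resp); err != nil {
		panic(err)
	}
	fmt.Println(resp.Indices) // [my-index]
}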
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get/GetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get/GetResponse.ts#L22-L24 type Response struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` diff --git a/typedapi/core/getscript/get_script.go b/typedapi/core/getscript/get_script.go old mode 100755 new mode 100644 index ffe92b03a8..c7412ca213 --- a/typedapi/core/getscript/get_script.go +++ b/typedapi/core/getscript/get_script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a script. package getscript @@ -168,7 +168,6 @@ func (r GetScript) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/getscript/response.go b/typedapi/core/getscript/response.go old mode 100755 new mode 100644 index 2dd3d9d24e..f78d766a06 --- a/typedapi/core/getscript/response.go +++ b/typedapi/core/getscript/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getscript @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscript // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script/GetScriptResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script/GetScriptResponse.ts#L23-L29 type Response struct { Found bool `json:"found"` diff --git a/typedapi/core/getscriptcontext/get_script_context.go b/typedapi/core/getscriptcontext/get_script_context.go old mode 100755 new mode 100644 index f79edd771b..fb355d41e9 --- a/typedapi/core/getscriptcontext/get_script_context.go +++ b/typedapi/core/getscriptcontext/get_script_context.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns all script contexts. 
package getscriptcontext @@ -157,7 +157,6 @@ func (r GetScriptContext) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/getscriptcontext/response.go b/typedapi/core/getscriptcontext/response.go old mode 100755 new mode 100644 index 34b05b9116..730ba0f837 --- a/typedapi/core/getscriptcontext/response.go +++ b/typedapi/core/getscriptcontext/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getscriptcontext @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscriptcontext // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script_context/GetScriptContextResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script_context/GetScriptContextResponse.ts#L22-L26 type Response struct { Contexts []types.GetScriptContext `json:"contexts"` diff --git a/typedapi/core/getscriptlanguages/get_script_languages.go b/typedapi/core/getscriptlanguages/get_script_languages.go old mode 100755 new mode 100644 index 9dacd460de..7b882c4700 --- a/typedapi/core/getscriptlanguages/get_script_languages.go +++ b/typedapi/core/getscriptlanguages/get_script_languages.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns available script types, languages and contexts package getscriptlanguages @@ -157,7 +157,6 @@ func (r GetScriptLanguages) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/getscriptlanguages/response.go b/typedapi/core/getscriptlanguages/response.go old mode 100755 new mode 100644 index ad8011c59c..f3dfaaf9e1 --- a/typedapi/core/getscriptlanguages/response.go +++ b/typedapi/core/getscriptlanguages/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getscriptlanguages @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscriptlanguages // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script_languages/GetScriptLanguagesResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script_languages/GetScriptLanguagesResponse.ts#L22-L27 type Response struct { LanguageContexts []types.LanguageContext `json:"language_contexts"` diff --git a/typedapi/core/getsource/get_source.go b/typedapi/core/getsource/get_source.go old mode 100755 new mode 100644 index 1b5d2255dd..154bbc9b2f --- a/typedapi/core/getsource/get_source.go +++ b/typedapi/core/getsource/get_source.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the source of a document. package getsource @@ -179,7 +179,6 @@ func (r GetSource) Do(ctx context.Context) (Response, error) { } return *response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/getsource/response.go b/typedapi/core/getsource/response.go old mode 100755 new mode 100644 index 762c26fe7d..6e3b2144d2 --- a/typedapi/core/getsource/response.go +++ b/typedapi/core/getsource/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getsource @@ -24,6 +24,10 @@ import "encoding/json" // Response holds the response body struct for the package getsource // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_source/SourceResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_source/SourceResponse.ts#L20-L22 -type Response json.RawMessage +type Response = json.RawMessage + +func NewResponse() *Response { + return new(Response) +} diff --git a/typedapi/core/healthreport/health_report.go b/typedapi/core/healthreport/health_report.go new file mode 100644 index 0000000000..ba92af9233 --- /dev/null +++ b/typedapi/core/healthreport/health_report.go @@ -0,0 +1,245 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
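`getsource.Response` is now an alias of json.RawMessage (with a NewResponse constructor so the generated Do keeps its shape), so the document source comes back as raw JSON for the caller to decode into its own type. A hedged sketch with a hypothetical document shape:

package docexample

import (
	"encoding/json"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/getsource"
)

// article is a hypothetical document shape used only for illustration.
type article struct {
	Title string `json:"title"`
	Views int    `json:"views"`
}

// decodeSource turns the raw _source returned by getsource's Do into a typed
// value; because Response is an alias of json.RawMessage it can be passed to
// json.Unmarshal directly.
func decodeSource(src getsource.Response) (article, error) {
	var a article
	err := json.Unmarshal(src, &a)
	return a, err
}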
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Returns the health of the cluster. +package healthreport + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + featureMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type HealthReport struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + paramSet int + + feature string +} + +// NewHealthReport type alias for index. +type NewHealthReport func() *HealthReport + +// NewHealthReportFunc returns a new instance of HealthReport with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewHealthReportFunc(tp elastictransport.Interface) NewHealthReport { + return func() *HealthReport { + n := New(tp) + + return n + } +} + +// Returns the health of the cluster. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/health-api.html +func New(tp elastictransport.Interface) *HealthReport { + r := &HealthReport{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *HealthReport) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_health_report") + + method = http.MethodGet + case r.paramSet == featureMask: + path.WriteString("/") + path.WriteString("_health_report") + path.WriteString("/") + + path.WriteString(r.feature) + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r HealthReport) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the HealthReport query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a healthreport.Response +func (r HealthReport) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r HealthReport) IsSuccess(ctx context.Context) (bool, error) { + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(ioutil.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + return false, nil +} + +// Header set a key, value pair in the HealthReport headers map. +func (r *HealthReport) Header(key, value string) *HealthReport { + r.headers.Set(key, value) + + return r +} + +// Feature A feature of the cluster, as returned by the top-level health report API. +// API Name: feature +func (r *HealthReport) Feature(v ...string) *HealthReport { + r.paramSet |= featureMask + r.feature = strings.Join(v, ",") + + return r +} + +// Timeout Explicit operation timeout. +// API name: timeout +func (r *HealthReport) Timeout(v string) *HealthReport { + r.values.Set("timeout", v) + + return r +} + +// Verbose Opt-in for more information about the health of the system. +// API name: verbose +func (r *HealthReport) Verbose(b bool) *HealthReport { + r.values.Set("verbose", strconv.FormatBool(b)) + + return r +} + +// Size Limit the number of affected resources the health report API returns. +// API name: size +func (r *HealthReport) Size(i int) *HealthReport { + r.values.Set("size", strconv.Itoa(i)) + + return r +} diff --git a/typedapi/cluster/postvotingconfigexclusions/response.go b/typedapi/core/healthreport/response.go old mode 100755 new mode 100644 similarity index 65% rename from typedapi/cluster/postvotingconfigexclusions/response.go rename to typedapi/core/healthreport/response.go index 85931c15f4..05cb0cac5c --- a/typedapi/cluster/postvotingconfigexclusions/response.go +++ b/typedapi/core/healthreport/response.go @@ -16,15 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
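Putting the new endpoint together: the builder is constructed from a transport, the optional feature path segment and the timeout/verbose/size query parameters are set through the fluent methods above, and Do decodes the body. A hedged usage sketch; the feature name is illustrative only, and `tp` is assumed to be any elastictransport.Interface (for example the transport of an existing client):

package docexample

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/healthreport"
)

// clusterHealth requests a verbose health report limited to one feature.
// "shards_availability" is only an example of a feature name.
func clusterHealth(ctx context.Context, tp elastictransport.Interface) error {
	res, err := healthreport.New(tp).
		Feature("shards_availability").
		Verbose(true).
		Size(100).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster:", res.ClusterName)
	return nil
}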
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 -package postvotingconfigexclusions +package healthreport -// Response holds the response body struct for the package postvotingconfigexclusions +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// Response holds the response body struct for the package healthreport // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/post_voting_config_exclusions/ClusterPostVotingConfigExclusionsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/Response.ts#L22-L27 type Response struct { + ClusterName string `json:"cluster_name"` + Indicators types.Indicators `json:"indicators"` } // NewResponse returns a Response diff --git a/typedapi/core/index/index.go b/typedapi/core/index/index.go old mode 100755 new mode 100644 index 282c09ff3a..94f40354a8 --- a/typedapi/core/index/index.go +++ b/typedapi/core/index/index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates a document in an index. package index @@ -222,7 +222,6 @@ func (r Index) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/index/response.go b/typedapi/core/index/response.go old mode 100755 new mode 100644 index 6e0d092e47..b55f6d30ef --- a/typedapi/core/index/response.go +++ b/typedapi/core/index/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package index @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package index // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/index/IndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/index/IndexResponse.ts#L22-L24 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` diff --git a/typedapi/core/info/info.go b/typedapi/core/info/info.go old mode 100755 new mode 100644 index 3180be4cd6..e63ba67617 --- a/typedapi/core/info/info.go +++ b/typedapi/core/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns basic information about the cluster. 
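The health report response carries the cluster name plus the per-feature indicators; the members of types.Indicators are not part of this hunk, so the sketch below stays generic and simply renders the report as JSON for inspection.

package docexample

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/healthreport"
)

// dumpReport pretty-prints a health report; callers that need individual
// indicators would instead walk res.Indicators (see types.Indicators).
func dumpReport(res *healthreport.Response) {
	out, err := json.MarshalIndent(res, "", "  ")
	if err != nil {
		fmt.Println("marshal error:", err)
		return
	}
	fmt.Printf("cluster %s:\n%s\n", res.ClusterName, out)
}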
package info @@ -155,7 +155,6 @@ func (r Info) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/info/response.go b/typedapi/core/info/response.go old mode 100755 new mode 100644 index 522ea28227..baacc30530 --- a/typedapi/core/info/response.go +++ b/typedapi/core/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/info/RootNodeInfoResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/info/RootNodeInfoResponse.ts#L23-L31 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/core/knnsearch/knn_search.go b/typedapi/core/knnsearch/knn_search.go old mode 100755 new mode 100644 index c10cffc299..dd526e033d --- a/typedapi/core/knnsearch/knn_search.go +++ b/typedapi/core/knnsearch/knn_search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Performs a kNN search. package knnsearch @@ -203,7 +203,6 @@ func (r KnnSearch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/knnsearch/request.go b/typedapi/core/knnsearch/request.go old mode 100755 new mode 100644 index 159436ddbf..957e8cf5a5 --- a/typedapi/core/knnsearch/request.go +++ b/typedapi/core/knnsearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package knnsearch @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package knnsearch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/knn_search/KnnSearchRequest.ts#L27-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/knn_search/KnnSearchRequest.ts#L27-L80 type Request struct { // DocvalueFields The request returns doc values for field names matching these patterns diff --git a/typedapi/core/knnsearch/response.go b/typedapi/core/knnsearch/response.go old mode 100755 new mode 100644 index 8fab8baee4..1509b2e070 --- a/typedapi/core/knnsearch/response.go +++ b/typedapi/core/knnsearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package knnsearch @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package knnsearch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/knn_search/KnnSearchResponse.ts#L26-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/knn_search/KnnSearchResponse.ts#L26-L54 type Response struct { diff --git a/typedapi/core/mget/mget.go b/typedapi/core/mget/mget.go old mode 100755 new mode 100644 index e2a4bb8df6..1bc81f3fc4 --- a/typedapi/core/mget/mget.go +++ b/typedapi/core/mget/mget.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to get multiple documents in one request. package mget @@ -207,7 +207,6 @@ func (r Mget) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/mget/request.go b/typedapi/core/mget/request.go old mode 100755 new mode 100644 index 00358a67cc..a55da3edfe --- a/typedapi/core/mget/request.go +++ b/typedapi/core/mget/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mget @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package mget // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mget/MultiGetRequest.ts#L25-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mget/MultiGetRequest.ts#L25-L91 type Request struct { // Docs The documents you want to retrieve. Required if no index is specified in the diff --git a/typedapi/core/mget/response.go b/typedapi/core/mget/response.go old mode 100755 new mode 100644 index 235de0c3fc..28538c6fc3 --- a/typedapi/core/mget/response.go +++ b/typedapi/core/mget/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mget @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mget // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mget/MultiGetResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mget/MultiGetResponse.ts#L22-L26 type Response struct { Docs []types.ResponseItem `json:"docs"` diff --git a/typedapi/core/mtermvectors/mtermvectors.go b/typedapi/core/mtermvectors/mtermvectors.go old mode 100755 new mode 100644 index 5ebb308c52..d41d79965b --- a/typedapi/core/mtermvectors/mtermvectors.go +++ b/typedapi/core/mtermvectors/mtermvectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns multiple termvectors in one request. package mtermvectors @@ -209,7 +209,6 @@ func (r Mtermvectors) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/mtermvectors/request.go b/typedapi/core/mtermvectors/request.go old mode 100755 new mode 100644 index 4f16742aa8..b0c79567d8 --- a/typedapi/core/mtermvectors/request.go +++ b/typedapi/core/mtermvectors/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mtermvectors @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package mtermvectors // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mtermvectors/MultiTermVectorsRequest.ts#L31-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mtermvectors/MultiTermVectorsRequest.ts#L31-L58 type Request struct { Docs []types.MTermVectorsOperation `json:"docs,omitempty"` Ids []string `json:"ids,omitempty"` diff --git a/typedapi/core/mtermvectors/response.go b/typedapi/core/mtermvectors/response.go old mode 100755 new mode 100644 index 9751c25acf..37cf6a1c80 --- a/typedapi/core/mtermvectors/response.go +++ b/typedapi/core/mtermvectors/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mtermvectors @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mtermvectors // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mtermvectors/MultiTermVectorsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mtermvectors/MultiTermVectorsResponse.ts#L22-L24 type Response struct { Docs []types.TermVectorsResult `json:"docs"` diff --git a/typedapi/core/openpointintime/open_point_in_time.go b/typedapi/core/openpointintime/open_point_in_time.go old mode 100755 new mode 100644 index 62f5975aa0..7cbfb3b66f --- a/typedapi/core/openpointintime/open_point_in_time.go +++ b/typedapi/core/openpointintime/open_point_in_time.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Open a point in time that can be used in subsequent searches package openpointintime @@ -169,7 +169,6 @@ func (r OpenPointInTime) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/openpointintime/response.go b/typedapi/core/openpointintime/response.go old mode 100755 new mode 100644 index 859a9270b4..ecdefa2531 --- a/typedapi/core/openpointintime/response.go +++ b/typedapi/core/openpointintime/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package openpointintime // Response holds the response body struct for the package openpointintime // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/open_point_in_time/OpenPointInTimeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/open_point_in_time/OpenPointInTimeResponse.ts#L22-L24 type Response struct { Id string `json:"id"` diff --git a/typedapi/core/ping/ping.go b/typedapi/core/ping/ping.go old mode 100755 new mode 100644 index 19d8e12814..9bf4642316 --- a/typedapi/core/ping/ping.go +++ b/typedapi/core/ping/ping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns whether the cluster is running. 
package ping @@ -24,7 +24,6 @@ package ping import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -34,7 +33,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // ErrBuildPath is returned in case of missing parameters within the build of the request. @@ -137,36 +135,6 @@ func (r Ping) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a ping.Response -func (r Ping) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r Ping) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/core/putscript/put_script.go b/typedapi/core/putscript/put_script.go old mode 100755 new mode 100644 index fd4abf9ea3..c9e03fec61 --- a/typedapi/core/putscript/put_script.go +++ b/typedapi/core/putscript/put_script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates a script. package putscript @@ -217,7 +217,6 @@ func (r PutScript) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/putscript/request.go b/typedapi/core/putscript/request.go old mode 100755 new mode 100644 index b5a4ec206f..96a3936aca --- a/typedapi/core/putscript/request.go +++ b/typedapi/core/putscript/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putscript @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putscript // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/put_script/PutScriptRequest.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/put_script/PutScriptRequest.ts#L25-L42 type Request struct { Script types.StoredScript `json:"script"` } diff --git a/typedapi/core/putscript/response.go b/typedapi/core/putscript/response.go old mode 100755 new mode 100644 index 8f1114ab67..99c7b2b684 --- a/typedapi/core/putscript/response.go +++ b/typedapi/core/putscript/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
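Ping follows the same pattern as the existence checks earlier in this diff: the generated Do is gone and the endpoint is used as a boolean liveness probe. A short sketch, assuming the ping package exposes the same New(transport) constructor as the other generated packages (not shown in this hunk):

package docexample

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/ping"
)

// clusterIsUp is a liveness probe: true for any 2xx answer, false otherwise,
// with an error only when the request itself could not be performed.
func clusterIsUp(ctx context.Context, tp elastictransport.Interface) (bool, error) {
	return ping.New(tp).IsSuccess(ctx)
}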
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putscript // Response holds the response body struct for the package putscript // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/put_script/PutScriptResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/put_script/PutScriptResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/core/rankeval/rank_eval.go b/typedapi/core/rankeval/rank_eval.go old mode 100755 new mode 100644 index e305134b55..73e87a8bb3 --- a/typedapi/core/rankeval/rank_eval.go +++ b/typedapi/core/rankeval/rank_eval.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to evaluate the quality of ranked search results over a set of typical // search queries @@ -209,7 +209,6 @@ func (r RankEval) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/rankeval/request.go b/typedapi/core/rankeval/request.go old mode 100755 new mode 100644 index c764a0d32f..7426ca5916 --- a/typedapi/core/rankeval/request.go +++ b/typedapi/core/rankeval/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rankeval @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rankeval // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/RankEvalRequest.ts#L24-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/RankEvalRequest.ts#L24-L61 type Request struct { // Metric Definition of the evaluation metric to calculate. diff --git a/typedapi/core/rankeval/response.go b/typedapi/core/rankeval/response.go old mode 100755 new mode 100644 index 1a368f2543..f5452fc073 --- a/typedapi/core/rankeval/response.go +++ b/typedapi/core/rankeval/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rankeval @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package rankeval // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/RankEvalResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/RankEvalResponse.ts#L26-L34 type Response struct { diff --git a/typedapi/core/reindex/reindex.go b/typedapi/core/reindex/reindex.go old mode 100755 new mode 100644 index 848bdcbbef..e88740fe58 --- a/typedapi/core/reindex/reindex.go +++ b/typedapi/core/reindex/reindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to copy documents from one index to another, optionally filtering the // source @@ -201,7 +201,6 @@ func (r Reindex) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/reindex/request.go b/typedapi/core/reindex/request.go old mode 100755 new mode 100644 index ef3974115a..916b0812f8 --- a/typedapi/core/reindex/request.go +++ b/typedapi/core/reindex/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reindex @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package reindex // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex/ReindexRequest.ts#L27-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex/ReindexRequest.ts#L27-L51 type Request struct { Conflicts *conflicts.Conflicts `json:"conflicts,omitempty"` Dest types.ReindexDestination `json:"dest"` diff --git a/typedapi/core/reindex/response.go b/typedapi/core/reindex/response.go old mode 100755 new mode 100644 index c36de5ecb5..1bc48a24b1 --- a/typedapi/core/reindex/response.go +++ b/typedapi/core/reindex/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reindex @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reindex // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex/ReindexResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex/ReindexResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` diff --git a/typedapi/core/reindexrethrottle/reindex_rethrottle.go b/typedapi/core/reindexrethrottle/reindex_rethrottle.go old mode 100755 new mode 100644 index c71edac26f..222045f9ae --- a/typedapi/core/reindexrethrottle/reindex_rethrottle.go +++ b/typedapi/core/reindexrethrottle/reindex_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Changes the number of requests per second for a particular Reindex operation. package reindexrethrottle @@ -170,7 +170,6 @@ func (r ReindexRethrottle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/reindexrethrottle/response.go b/typedapi/core/reindexrethrottle/response.go old mode 100755 new mode 100644 index ae29ab3637..a5a0ee440b --- a/typedapi/core/reindexrethrottle/response.go +++ b/typedapi/core/reindexrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reindexrethrottle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reindexrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex_rethrottle/ReindexRethrottleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex_rethrottle/ReindexRethrottleResponse.ts#L23-L25 type Response struct { Nodes map[string]types.ReindexNode `json:"nodes"` diff --git a/typedapi/core/rendersearchtemplate/render_search_template.go b/typedapi/core/rendersearchtemplate/render_search_template.go old mode 100755 new mode 100644 index 979361780c..69a61a679a --- a/typedapi/core/rendersearchtemplate/render_search_template.go +++ b/typedapi/core/rendersearchtemplate/render_search_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to use the Mustache language to pre-render a search definition. 
package rendersearchtemplate @@ -210,7 +210,6 @@ func (r RenderSearchTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/rendersearchtemplate/request.go b/typedapi/core/rendersearchtemplate/request.go old mode 100755 new mode 100644 index c29e8baf6d..be2246b315 --- a/typedapi/core/rendersearchtemplate/request.go +++ b/typedapi/core/rendersearchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rendersearchtemplate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package rendersearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/render_search_template/RenderSearchTemplateRequest.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/render_search_template/RenderSearchTemplateRequest.ts#L25-L39 type Request struct { File *string `json:"file,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` diff --git a/typedapi/core/rendersearchtemplate/response.go b/typedapi/core/rendersearchtemplate/response.go old mode 100755 new mode 100644 index 1ab4c2f2e5..dfbacdb20d --- a/typedapi/core/rendersearchtemplate/response.go +++ b/typedapi/core/rendersearchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rendersearchtemplate @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package rendersearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/render_search_template/RenderSearchTemplateResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/render_search_template/RenderSearchTemplateResponse.ts#L23-L25 type Response struct { TemplateOutput map[string]json.RawMessage `json:"template_output"` diff --git a/typedapi/core/scriptspainlessexecute/request.go b/typedapi/core/scriptspainlessexecute/request.go old mode 100755 new mode 100644 index c7b946f7fc..4df263a4f7 --- a/typedapi/core/scriptspainlessexecute/request.go +++ b/typedapi/core/scriptspainlessexecute/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package scriptspainlessexecute @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package scriptspainlessexecute // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/scripts_painless_execute/ExecutePainlessScriptRequest.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/scripts_painless_execute/ExecutePainlessScriptRequest.ts#L24-L35 type Request struct { Context *string `json:"context,omitempty"` ContextSetup *types.PainlessContextSetup `json:"context_setup,omitempty"` diff --git a/typedapi/core/scriptspainlessexecute/response.go b/typedapi/core/scriptspainlessexecute/response.go old mode 100755 new mode 100644 index d5b58a50a5..5c569ce0eb --- a/typedapi/core/scriptspainlessexecute/response.go +++ b/typedapi/core/scriptspainlessexecute/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package scriptspainlessexecute @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package scriptspainlessexecute // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/scripts_painless_execute/ExecutePainlessScriptResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/scripts_painless_execute/ExecutePainlessScriptResponse.ts#L20-L24 type Response struct { Result json.RawMessage `json:"result,omitempty"` diff --git a/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go b/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go old mode 100755 new mode 100644 index 5934e60904..6cab00de78 --- a/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go +++ b/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows an arbitrary script to be executed and a result to be returned package scriptspainlessexecute @@ -196,7 +196,6 @@ func (r ScriptsPainlessExecute) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/scroll/request.go b/typedapi/core/scroll/request.go old mode 100755 new mode 100644 index 90a8423563..4a85c07a44 --- a/typedapi/core/scroll/request.go +++ b/typedapi/core/scroll/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package scroll @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package scroll // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/scroll/ScrollRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/scroll/ScrollRequest.ts#L24-L59 type Request struct { // Scroll Period to retain the search context for scrolling. diff --git a/typedapi/core/scroll/response.go b/typedapi/core/scroll/response.go old mode 100755 new mode 100644 index 24e9888677..6fa8c9bd4c --- a/typedapi/core/scroll/response.go +++ b/typedapi/core/scroll/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package scroll @@ -25,6 +25,7 @@ import ( "encoding/json" "errors" "io" + "strconv" "strings" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package scroll // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/scroll/ScrollResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/scroll/ScrollResponse.ts#L22-L24 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` @@ -76,6 +77,10 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -88,415 +93,494 @@ func (s *Response) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } switch elems[0] { + case "cardinality": o := types.NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); 
err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := types.NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := types.NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := types.NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := types.NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := types.NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := types.NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := types.NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := types.NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := types.NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := types.NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := types.NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := types.NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := types.NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := types.NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := types.NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := 
types.NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := types.NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := types.NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := types.NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := types.NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := types.NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "srareterms": o := types.NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := types.NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := types.NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := types.NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := types.NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := types.NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := types.NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := types.NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := types.NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := types.NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := types.NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := types.NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geotile_grid": o := types.NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } 
s.Aggregations[elems[1]] = o + case "geohex_grid": o := types.NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := types.NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := types.NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := types.NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := types.NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := types.NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := types.NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := types.NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := types.NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := types.NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := types.NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := types.NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := types.NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := types.NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := types.NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := types.NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := types.NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := types.NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err 
:= dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := types.NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := types.NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := types.NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -523,6 +607,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Fields); err != nil { return err } @@ -533,13 +620,34 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - if err := dec.Decode(&s.MaxScore); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := types.Float64(value) + s.MaxScore = &f + case float64: + f := types.Float64(v) + s.MaxScore = &f } case "num_reduce_phases": - if err := dec.Decode(&s.NumReducePhases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f } case "pit_id": @@ -563,23 +671,54 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]types.Suggest, 0) + } if err := dec.Decode(&s.Suggest); err != nil { return err } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/core/scroll/scroll.go b/typedapi/core/scroll/scroll.go old mode 100755 new mode 100644 index 2b659e868b..a2485b4510 --- a/typedapi/core/scroll/scroll.go +++ b/typedapi/core/scroll/scroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
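Note on the scroll response hunks above: the regenerated UnmarshalJSON now passes a pointer to dec.Decode (&o instead of o), lazily initialises the Aggregations map, and accepts scalar fields such as max_score, num_reduce_phases, terminated_early, timed_out and took either in their native JSON form or as quoted strings. A minimal standalone sketch of that string-or-number tolerance, using a hypothetical tolerantResponse type that is not part of the generated client:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// tolerantResponse is a hypothetical stand-in for a generated Response type.
type tolerantResponse struct {
	Took int64
}

// UnmarshalJSON accepts "took" either as a JSON number or as a quoted string,
// mirroring the pattern added in the hunks above.
func (r *tolerantResponse) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["took"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string: // e.g. "42"
			n, err := strconv.ParseInt(v, 10, 64)
			if err != nil {
				return err
			}
			r.Took = n
		case float64: // plain JSON number
			r.Took = int64(v)
		}
	}
	return nil
}

func main() {
	var a, b tolerantResponse
	_ = json.Unmarshal([]byte(`{"took": 42}`), &a)
	_ = json.Unmarshal([]byte(`{"took": "42"}`), &b)
	fmt.Println(a.Took, b.Took) // 42 42
}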
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to retrieve a large numbers of results from a single search request. package scroll @@ -211,7 +211,6 @@ func (r Scroll) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/search/request.go b/typedapi/core/search/request.go old mode 100755 new mode 100644 index 53ad0659a9..04736ec877 --- a/typedapi/core/search/request.go +++ b/typedapi/core/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package search @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/SearchRequest.ts#L52-L245 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/SearchRequest.ts#L52-L245 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -67,7 +67,7 @@ type Request struct { Rescore []types.Rescore `json:"rescore,omitempty"` // RuntimeMappings Defines one or more runtime fields in the search request. These fields take // precedence over mapped fields with the same name. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // ScriptFields Retrieve a script evaluation (based on different fields) for each hit. ScriptFields map[string]types.ScriptField `json:"script_fields,omitempty"` SearchAfter []types.FieldValue `json:"search_after,omitempty"` diff --git a/typedapi/core/search/response.go b/typedapi/core/search/response.go old mode 100755 new mode 100644 index 7e7b70ae78..e0ea5d3e40 --- a/typedapi/core/search/response.go +++ b/typedapi/core/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
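Note on the search request hunk above: RuntimeMappings is narrowed from map[string]types.RuntimeField to the named types.RuntimeFields. Assuming that name is a map type over RuntimeField (which the signature change suggests but is not confirmed here), existing map literals keep compiling. A hedged standalone sketch with hypothetical runtimeField, runtimeFields and searchRequest stand-ins:

package main

import "fmt"

// runtimeField is a hypothetical stand-in for types.RuntimeField.
type runtimeField struct {
	Type string
}

// runtimeFields models the assumed shape of types.RuntimeFields:
// a named map keyed by runtime field name.
type runtimeFields map[string]runtimeField

// searchRequest mimics the request struct after the change.
type searchRequest struct {
	RuntimeMappings runtimeFields `json:"runtime_mappings,omitempty"`
}

func main() {
	req := searchRequest{
		// A composite literal of the named type still reads like the
		// previous map[string]RuntimeField literal.
		RuntimeMappings: runtimeFields{
			"day_of_week": {Type: "keyword"},
		},
	}
	fmt.Println(len(req.RuntimeMappings)) // 1
}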
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package search @@ -25,6 +25,7 @@ import ( "encoding/json" "errors" "io" + "strconv" "strings" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/SearchResponse.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/SearchResponse.ts#L34-L36 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` @@ -76,6 +77,10 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -88,415 +93,494 @@ func (s *Response) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } switch elems[0] { + case "cardinality": o := types.NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := types.NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := types.NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := types.NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := types.NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := types.NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := 
dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := types.NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := types.NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := types.NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := types.NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := types.NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := types.NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := types.NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := types.NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := types.NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := types.NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := types.NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := types.NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := types.NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := types.NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := types.NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } 
s.Aggregations[elems[1]] = o + case "srareterms": o := types.NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := types.NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := types.NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := types.NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := types.NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := types.NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := types.NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := types.NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := types.NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := types.NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := types.NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := types.NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geotile_grid": o := types.NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohex_grid": o := types.NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := types.NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := types.NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := types.NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := types.NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := types.NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err 
!= nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := types.NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := types.NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := types.NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := types.NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := types.NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := types.NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := types.NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := types.NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := types.NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := types.NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := types.NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := types.NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := types.NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := types.NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := types.NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -523,6 +607,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 
0) + } if err := dec.Decode(&s.Fields); err != nil { return err } @@ -533,13 +620,34 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - if err := dec.Decode(&s.MaxScore); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := types.Float64(value) + s.MaxScore = &f + case float64: + f := types.Float64(v) + s.MaxScore = &f } case "num_reduce_phases": - if err := dec.Decode(&s.NumReducePhases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f } case "pit_id": @@ -563,23 +671,54 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]types.Suggest, 0) + } if err := dec.Decode(&s.Suggest); err != nil { return err } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/core/search/search.go b/typedapi/core/search/search.go old mode 100755 new mode 100644 index c20c2428d0..1a9fe5db4e --- a/typedapi/core/search/search.go +++ b/typedapi/core/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns results matching a query. package search @@ -213,7 +213,6 @@ func (r Search) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/searchmvt/request.go b/typedapi/core/searchmvt/request.go old mode 100755 new mode 100644 index 00d3a7b106..ec430b4610 --- a/typedapi/core/searchmvt/request.go +++ b/typedapi/core/searchmvt/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
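Note on the search response hunks above: aggregations are dispatched on Elasticsearch typed keys of the form "type#name", each concrete aggregate is now decoded through a pointer (&o), and a frequent_item_sets case has been added. A standalone sketch of how such keys split into a type prefix and the user-given name; the sample body and names are illustrative only:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	// Typed keys as Elasticsearch returns them when typed_keys is in effect.
	body := []byte(`{"aggregations":{"sterms#by_category":{"buckets":[]},"frequent_item_sets#fis":{"buckets":[]}}}`)

	var resp struct {
		Aggregations map[string]json.RawMessage `json:"aggregations"`
	}
	if err := json.Unmarshal(body, &resp); err != nil {
		panic(err)
	}

	for key := range resp.Aggregations {
		// The generated UnmarshalJSON splits on "#": the prefix selects the
		// concrete aggregate type (sterms, frequent_item_sets, ...), the
		// suffix is the aggregation name chosen in the request.
		if elems := strings.Split(key, "#"); len(elems) == 2 {
			fmt.Printf("type=%s name=%s\n", elems[0], elems[1])
		}
	}
}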
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package searchmvt @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package searchmvt // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_mvt/SearchMvtRequest.ts#L33-L188 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_mvt/SearchMvtRequest.ts#L33-L188 type Request struct { // Aggs Sub-aggregations for the geotile_grid. @@ -80,7 +80,7 @@ type Request struct { Query *types.Query `json:"query,omitempty"` // RuntimeMappings Defines one or more runtime fields in the search request. These fields take // precedence over mapped fields with the same name. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // Size Maximum number of features to return in the hits layer. Accepts 0-10000. // If 0, results don’t include the hits layer. Size *int `json:"size,omitempty"` diff --git a/typedapi/core/searchmvt/response.go b/typedapi/core/searchmvt/response.go old mode 100755 new mode 100644 index 593b08da69..8992722e44 --- a/typedapi/core/searchmvt/response.go +++ b/typedapi/core/searchmvt/response.go @@ -16,15 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package searchmvt // Response holds the response body struct for the package searchmvt // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_mvt/SearchMvtResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_mvt/SearchMvtResponse.ts#L22-L24 -type Response []byte +type Response = []byte // NewResponse returns a Response func NewResponse() Response { diff --git a/typedapi/core/searchmvt/search_mvt.go b/typedapi/core/searchmvt/search_mvt.go old mode 100755 new mode 100644 index b08d478363..b5b4a00263 --- a/typedapi/core/searchmvt/search_mvt.go +++ b/typedapi/core/searchmvt/search_mvt.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Searches a vector tile for geospatial values. Returns results as a binary // Mapbox vector tile. @@ -241,7 +241,6 @@ func (r SearchMvt) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/searchshards/response.go b/typedapi/core/searchshards/response.go old mode 100755 new mode 100644 index 9a7ba2372d..5500f7b055 --- a/typedapi/core/searchshards/response.go +++ b/typedapi/core/searchshards/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
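Note on the searchmvt response hunk above: type Response []byte (a distinct defined type) becomes type Response = []byte (an alias), so Response and []byte are now one and the same type rather than two types sharing an underlying representation; the difference shows up in type assertions and reflection. A small sketch with hypothetical tileAlias and tileDefined names:

package main

import "fmt"

// tileAlias mirrors the new declaration (type Response = []byte): it is the
// same type as []byte, not a distinct one.
type tileAlias = []byte

// tileDefined mirrors the old declaration (type Response []byte): a distinct
// type whose underlying type is []byte.
type tileDefined []byte

func main() {
	var a interface{} = tileAlias{0x1a}
	var d interface{} = tileDefined{0x1a}

	_, aliasIsBytes := a.([]byte)   // true: the alias *is* []byte
	_, definedIsBytes := d.([]byte) // false: a defined type is not []byte
	fmt.Println(aliasIsBytes, definedIsBytes)
}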
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package searchshards @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package searchshards // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_shards/SearchShardsResponse.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_shards/SearchShardsResponse.ts#L25-L31 type Response struct { Indices map[string]types.ShardStoreIndex `json:"indices"` diff --git a/typedapi/core/searchshards/search_shards.go b/typedapi/core/searchshards/search_shards.go old mode 100755 new mode 100644 index 5511346275..a3cc4fe981 --- a/typedapi/core/searchshards/search_shards.go +++ b/typedapi/core/searchshards/search_shards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about the indices and shards that a search request would // be executed against. @@ -174,7 +174,6 @@ func (r SearchShards) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/searchtemplate/request.go b/typedapi/core/searchtemplate/request.go old mode 100755 new mode 100644 index fecd22ea21..84b3b6c755 --- a/typedapi/core/searchtemplate/request.go +++ b/typedapi/core/searchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package searchtemplate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package searchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_template/SearchTemplateRequest.ts#L32-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_template/SearchTemplateRequest.ts#L32-L96 type Request struct { Explain *bool `json:"explain,omitempty"` // Id ID of the search template to use. If no source is specified, diff --git a/typedapi/core/searchtemplate/response.go b/typedapi/core/searchtemplate/response.go old mode 100755 new mode 100644 index fbc0eab640..f890645339 --- a/typedapi/core/searchtemplate/response.go +++ b/typedapi/core/searchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package searchtemplate @@ -25,6 +25,7 @@ import ( "encoding/json" "errors" "io" + "strconv" "strings" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package searchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_template/SearchTemplateResponse.ts#L30-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_template/SearchTemplateResponse.ts#L30-L48 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` @@ -76,6 +77,10 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -88,415 +93,494 @@ func (s *Response) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } switch elems[0] { + case "cardinality": o := types.NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := types.NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := types.NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := types.NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := types.NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := types.NewWeightedAvgAggregate() - if err := 
dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := types.NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := types.NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := types.NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := types.NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := types.NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := types.NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := types.NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := types.NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := types.NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := types.NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := types.NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := types.NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := types.NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := types.NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := types.NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := 
dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "srareterms": o := types.NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := types.NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := types.NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := types.NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := types.NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := types.NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := types.NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := types.NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := types.NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := types.NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := types.NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := types.NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geotile_grid": o := types.NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohex_grid": o := types.NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := types.NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := types.NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := types.NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := types.NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := types.NewIpPrefixAggregate() - if err := dec.Decode(o); err 
!= nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := types.NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := types.NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := types.NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := types.NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := types.NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := types.NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := types.NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := types.NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := types.NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := types.NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := types.NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := types.NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := types.NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := types.NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := types.NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -523,6 +607,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + 
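Aside on the aggregate hunks above: the response body keys each aggregation as "<type>#<name>" and the decoder dispatches on the prefix; the regenerated switch now decodes into &o instead of o, adds the new "frequent_item_sets" case, and nil-checks the Aggregations map before writing entries. The following stand-alone sketch is only an illustration of that idea with simplified stand-in types, not the generated decoder:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// stringTermsAggregate is a simplified stand-in for types.StringTermsAggregate.
type stringTermsAggregate struct {
	Buckets []struct {
		Key      string `json:"key"`
		DocCount int64  `json:"doc_count"`
	} `json:"buckets"`
}

func decodeAggregations(raw map[string]json.RawMessage) (map[string]any, error) {
	// The generated code now guards this map with a nil check before writing
	// individual entries, because assigning into a nil map panics.
	out := make(map[string]any, len(raw))
	for key, msg := range raw {
		name, typ := key, ""
		if elems := strings.SplitN(key, "#", 2); len(elems) == 2 {
			typ, name = elems[0], elems[1]
		}
		switch typ {
		case "sterms": // one concrete case among the many in the generated switch
			o := &stringTermsAggregate{}
			if err := json.Unmarshal(msg, o); err != nil {
				return nil, err
			}
			out[name] = o
		default: // unknown aggregate types fall back to a generic map
			var o map[string]any
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, err
			}
			out[name] = o
		}
	}
	return out, nil
}

func main() {
	body := []byte(`{"sterms#genres":{"buckets":[{"key":"rock","doc_count":3}]}}`)
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(body, &raw); err != nil {
		panic(err)
	}
	aggs, err := decodeAggregations(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", aggs["genres"])
}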
s.Fields = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Fields); err != nil { return err } @@ -533,13 +620,34 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - if err := dec.Decode(&s.MaxScore); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := types.Float64(value) + s.MaxScore = &f + case float64: + f := types.Float64(v) + s.MaxScore = &f } case "num_reduce_phases": - if err := dec.Decode(&s.NumReducePhases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f } case "pit_id": @@ -563,23 +671,54 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]types.Suggest, 0) + } if err := dec.Decode(&s.Suggest); err != nil { return err } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/core/searchtemplate/search_template.go b/typedapi/core/searchtemplate/search_template.go old mode 100755 new mode 100644 index d0bea3b935..0e56491ba2 --- a/typedapi/core/searchtemplate/search_template.go +++ b/typedapi/core/searchtemplate/search_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to use the Mustache language to pre-render a search definition. package searchtemplate @@ -215,7 +215,6 @@ func (r SearchTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/termsenum/request.go b/typedapi/core/termsenum/request.go old mode 100755 new mode 100644 index b4d03af66c..d1bd9a1b4a --- a/typedapi/core/termsenum/request.go +++ b/typedapi/core/termsenum/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
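Aside: the max_score, num_reduce_phases, terminated_early, timed_out and took hunks above replace a direct Decode with decoding into interface{} and falling back to strconv, so each field accepts either a plain JSON value or its quoted string form. A self-contained sketch of that pattern follows; field names come from the diff, but the types are simplified stand-ins (the generated code wraps the float in types.Float64 and handles booleans the same way via strconv.ParseBool):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type response struct {
	Took     int64
	MaxScore *float64
}

func (r *response) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["took"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string: // quoted number, e.g. "17"
			value, err := strconv.ParseInt(v, 10, 64)
			if err != nil {
				return err
			}
			r.Took = value
		case float64: // plain JSON number
			r.Took = int64(v)
		}
	}
	if msg, ok := raw["max_score"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string:
			value, err := strconv.ParseFloat(v, 64)
			if err != nil {
				return err
			}
			r.MaxScore = &value
		case float64:
			r.MaxScore = &v
		}
	}
	return nil
}

func main() {
	var r response
	if err := json.Unmarshal([]byte(`{"took":"17","max_score":1.5}`), &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Took, *r.MaxScore) // 17 1.5
}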
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package termsenum @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package termsenum // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/terms_enum/TermsEnumRequest.ts#L26-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/terms_enum/TermsEnumRequest.ts#L26-L65 type Request struct { // CaseInsensitive When true the provided search string is matched against index terms without diff --git a/typedapi/core/termsenum/response.go b/typedapi/core/termsenum/response.go old mode 100755 new mode 100644 index ab8ea3ade4..444987f706 --- a/typedapi/core/termsenum/response.go +++ b/typedapi/core/termsenum/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package termsenum @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package termsenum // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/terms_enum/TermsEnumResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/terms_enum/TermsEnumResponse.ts#L22-L28 type Response struct { Complete bool `json:"complete"` diff --git a/typedapi/core/termsenum/terms_enum.go b/typedapi/core/termsenum/terms_enum.go old mode 100755 new mode 100644 index a991bb1421..9d0eb04e0b --- a/typedapi/core/termsenum/terms_enum.go +++ b/typedapi/core/termsenum/terms_enum.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // The terms enum API can be used to discover terms in the index that begin // with the provided string. It is designed for low-latency look-ups used in @@ -207,7 +207,6 @@ func (r TermsEnum) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/termvectors/request.go b/typedapi/core/termvectors/request.go old mode 100755 new mode 100644 index f07edc11aa..c41a64cbdf --- a/typedapi/core/termvectors/request.go +++ b/typedapi/core/termvectors/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package termvectors @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package termvectors // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/TermVectorsRequest.ts#L33-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/TermVectorsRequest.ts#L33-L61 type Request struct { Doc json.RawMessage `json:"doc,omitempty"` Filter *types.TermVectorsFilter `json:"filter,omitempty"` diff --git a/typedapi/core/termvectors/response.go b/typedapi/core/termvectors/response.go old mode 100755 new mode 100644 index 9633f3ec53..6c20325802 --- a/typedapi/core/termvectors/response.go +++ b/typedapi/core/termvectors/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package termvectors @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package termvectors // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/TermVectorsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/TermVectorsResponse.ts#L25-L34 type Response struct { Found bool `json:"found"` diff --git a/typedapi/core/termvectors/termvectors.go b/typedapi/core/termvectors/termvectors.go old mode 100755 new mode 100644 index 89c304c753..8334708e9a --- a/typedapi/core/termvectors/termvectors.go +++ b/typedapi/core/termvectors/termvectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information and statistics about terms in the fields of a particular // document. @@ -222,7 +222,6 @@ func (r Termvectors) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/update/request.go b/typedapi/core/update/request.go old mode 100755 new mode 100644 index 18139c1a8d..90b750d3fc --- a/typedapi/core/update/request.go +++ b/typedapi/core/update/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
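Aside: the termvectors Request above keeps Doc as json.RawMessage, so callers can pass any pre-serialized document without the client dictating its shape. A minimal sketch with a stand-in struct (not the generated package):

package main

import (
	"encoding/json"
	"fmt"
)

// termvectorsRequest mimics only the Doc field of the generated Request.
type termvectorsRequest struct {
	Doc json.RawMessage `json:"doc,omitempty"`
}

func main() {
	doc, err := json.Marshal(map[string]any{"message": "hello world"})
	if err != nil {
		panic(err)
	}
	body, err := json.Marshal(termvectorsRequest{Doc: doc})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // {"doc":{"message":"hello world"}}
}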
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package update @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package update // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/update/UpdateRequest.ts#L38-L151 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/update/UpdateRequest.ts#L38-L151 type Request struct { // DetectNoop Set to false to disable setting 'result' in the response diff --git a/typedapi/core/update/response.go b/typedapi/core/update/response.go old mode 100755 new mode 100644 index 2934e7c4b6..fcbd7111a5 --- a/typedapi/core/update/response.go +++ b/typedapi/core/update/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package update @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package update // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/update/UpdateResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/update/UpdateResponse.ts#L27-L29 type Response struct { Get *types.InlineGet `json:"get,omitempty"` diff --git a/typedapi/core/update/update.go b/typedapi/core/update/update.go old mode 100755 new mode 100644 index 0f52bddaf8..fd4623f35f --- a/typedapi/core/update/update.go +++ b/typedapi/core/update/update.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates a document with a script or partial document. package update @@ -214,7 +214,6 @@ func (r Update) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/updatebyquery/request.go b/typedapi/core/updatebyquery/request.go old mode 100755 new mode 100644 index 863494b37d..43dba62ab2 --- a/typedapi/core/updatebyquery/request.go +++ b/typedapi/core/updatebyquery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatebyquery @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package updatebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/update_by_query/UpdateByQueryRequest.ts#L37-L85 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/update_by_query/UpdateByQueryRequest.ts#L37-L85 type Request struct { Conflicts *conflicts.Conflicts `json:"conflicts,omitempty"` MaxDocs *int64 `json:"max_docs,omitempty"` diff --git a/typedapi/core/updatebyquery/response.go b/typedapi/core/updatebyquery/response.go old mode 100755 new mode 100644 index c5721defe5..90e2eb2dfb --- a/typedapi/core/updatebyquery/response.go +++ b/typedapi/core/updatebyquery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatebyquery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/update_by_query/UpdateByQueryResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/update_by_query/UpdateByQueryResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` diff --git a/typedapi/core/updatebyquery/update_by_query.go b/typedapi/core/updatebyquery/update_by_query.go old mode 100755 new mode 100644 index e9e6af2913..9aa11323ac --- a/typedapi/core/updatebyquery/update_by_query.go +++ b/typedapi/core/updatebyquery/update_by_query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Performs an update on every document in the index without changing the // source, @@ -212,7 +212,6 @@ func (r UpdateByQuery) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/core/updatebyqueryrethrottle/response.go b/typedapi/core/updatebyqueryrethrottle/response.go old mode 100755 new mode 100644 index 37390e4969..89d6a3b92f --- a/typedapi/core/updatebyqueryrethrottle/response.go +++ b/typedapi/core/updatebyqueryrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatebyqueryrethrottle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatebyqueryrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleResponse.ts#L23-L25 type Response struct { Nodes map[string]types.UpdateByQueryRethrottleNode `json:"nodes"` diff --git a/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go b/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go old mode 100755 new mode 100644 index 020ee6d16e..89ff440bb0 --- a/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go +++ b/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Changes the number of requests per second for a particular Update By Query // operation. @@ -172,7 +172,6 @@ func (r UpdateByQueryRethrottle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go b/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go old mode 100755 new mode 100644 index 8e7c8ecb60..a1fe81c5ee --- a/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go +++ b/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes the specified dangling index package deletedanglingindex @@ -169,7 +169,6 @@ func (r DeleteDanglingIndex) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/danglingindices/deletedanglingindex/response.go b/typedapi/danglingindices/deletedanglingindex/response.go old mode 100755 new mode 100644 index 6a7b2247a5..ca87ec43ec --- a/typedapi/danglingindices/deletedanglingindex/response.go +++ b/typedapi/danglingindices/deletedanglingindex/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletedanglingindex // Response holds the response body struct for the package deletedanglingindex // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/dangling_indices/delete_dangling_index/DeleteDanglingIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/dangling_indices/delete_dangling_index/DeleteDanglingIndexResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/danglingindices/importdanglingindex/import_dangling_index.go b/typedapi/danglingindices/importdanglingindex/import_dangling_index.go old mode 100755 new mode 100644 index ecb1cfa46c..ce3e65495e --- a/typedapi/danglingindices/importdanglingindex/import_dangling_index.go +++ b/typedapi/danglingindices/importdanglingindex/import_dangling_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Imports the specified dangling index package importdanglingindex @@ -169,7 +169,6 @@ func (r ImportDanglingIndex) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/danglingindices/importdanglingindex/response.go b/typedapi/danglingindices/importdanglingindex/response.go old mode 100755 new mode 100644 index 8369731192..96eee74976 --- a/typedapi/danglingindices/importdanglingindex/response.go +++ b/typedapi/danglingindices/importdanglingindex/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package importdanglingindex // Response holds the response body struct for the package importdanglingindex // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/dangling_indices/import_dangling_index/ImportDanglingIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/dangling_indices/import_dangling_index/ImportDanglingIndexResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go b/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go old mode 100755 new mode 100644 index d9ee31da8c..8def2b7a52 --- a/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go +++ b/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns all dangling indices. 
package listdanglingindices @@ -157,7 +157,6 @@ func (r ListDanglingIndices) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/danglingindices/listdanglingindices/response.go b/typedapi/danglingindices/listdanglingindices/response.go old mode 100755 new mode 100644 index 0c114fd065..972216d332 --- a/typedapi/danglingindices/listdanglingindices/response.go +++ b/typedapi/danglingindices/listdanglingindices/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package listdanglingindices @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package listdanglingindices // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L23-L27 type Response struct { DanglingIndices []types.DanglingIndex `json:"dangling_indices"` diff --git a/typedapi/enrich/deletepolicy/delete_policy.go b/typedapi/enrich/deletepolicy/delete_policy.go old mode 100755 new mode 100644 index bce0062fa7..a175356342 --- a/typedapi/enrich/deletepolicy/delete_policy.go +++ b/typedapi/enrich/deletepolicy/delete_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing enrich policy and its enrich index. package deletepolicy @@ -170,7 +170,6 @@ func (r DeletePolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/enrich/deletepolicy/response.go b/typedapi/enrich/deletepolicy/response.go old mode 100755 new mode 100644 index b97879b8dd..eb72405c21 --- a/typedapi/enrich/deletepolicy/response.go +++ b/typedapi/enrich/deletepolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletepolicy // Response holds the response body struct for the package deletepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/delete_policy/DeleteEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/delete_policy/DeleteEnrichPolicyResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/enrich/executepolicy/execute_policy.go b/typedapi/enrich/executepolicy/execute_policy.go old mode 100755 new mode 100644 index 488e7454d6..fe448933dd --- a/typedapi/enrich/executepolicy/execute_policy.go +++ b/typedapi/enrich/executepolicy/execute_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates the enrich index for an existing enrich policy. package executepolicy @@ -173,7 +173,6 @@ func (r ExecutePolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/enrich/executepolicy/response.go b/typedapi/enrich/executepolicy/response.go old mode 100755 new mode 100644 index 365d000b57..6283f85416 --- a/typedapi/enrich/executepolicy/response.go +++ b/typedapi/enrich/executepolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package executepolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package executepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/execute_policy/ExecuteEnrichPolicyResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/execute_policy/ExecuteEnrichPolicyResponse.ts#L23-L28 type Response struct { Status types.ExecuteEnrichPolicyStatus `json:"status"` diff --git a/typedapi/enrich/getpolicy/get_policy.go b/typedapi/enrich/getpolicy/get_policy.go old mode 100755 new mode 100644 index 17b0ec8013..5822afb1d1 --- a/typedapi/enrich/getpolicy/get_policy.go +++ b/typedapi/enrich/getpolicy/get_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets information about an enrich policy. 
package getpolicy @@ -175,7 +175,6 @@ func (r GetPolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/enrich/getpolicy/response.go b/typedapi/enrich/getpolicy/response.go old mode 100755 new mode 100644 index b6a2535f7e..f24e67dd6d --- a/typedapi/enrich/getpolicy/response.go +++ b/typedapi/enrich/getpolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getpolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/get_policy/GetEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/get_policy/GetEnrichPolicyResponse.ts#L22-L24 type Response struct { Policies []types.Summary `json:"policies"` diff --git a/typedapi/enrich/putpolicy/put_policy.go b/typedapi/enrich/putpolicy/put_policy.go old mode 100755 new mode 100644 index e548c5c092..2d8e8d24e9 --- a/typedapi/enrich/putpolicy/put_policy.go +++ b/typedapi/enrich/putpolicy/put_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new enrich policy. package putpolicy @@ -205,7 +205,6 @@ func (r PutPolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/enrich/putpolicy/request.go b/typedapi/enrich/putpolicy/request.go old mode 100755 new mode 100644 index 2e3281016f..0c81a2556e --- a/typedapi/enrich/putpolicy/request.go +++ b/typedapi/enrich/putpolicy/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putpolicy @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/put_policy/PutEnrichPolicyRequest.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/put_policy/PutEnrichPolicyRequest.ts#L24-L38 type Request struct { GeoMatch *types.EnrichPolicy `json:"geo_match,omitempty"` Match *types.EnrichPolicy `json:"match,omitempty"` diff --git a/typedapi/enrich/putpolicy/response.go b/typedapi/enrich/putpolicy/response.go old mode 100755 new mode 100644 index 7d5fa0dc65..b36cb404f5 --- a/typedapi/enrich/putpolicy/response.go +++ b/typedapi/enrich/putpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
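Aside: the put_policy Request above exposes one optional pointer per policy flavour (the hunk shows geo_match and match), so a caller sets exactly one and omitempty drops the rest from the body. A sketch with simplified stand-in types; the fields inside enrichPolicy are assumptions based on the documented enrich-policy JSON shape, not the generated types.EnrichPolicy:

package main

import (
	"encoding/json"
	"fmt"
)

// enrichPolicy is a simplified stand-in for types.EnrichPolicy.
type enrichPolicy struct {
	Indices      []string `json:"indices"`
	MatchField   string   `json:"match_field"`
	EnrichFields []string `json:"enrich_fields"`
}

type putPolicyRequest struct {
	GeoMatch *enrichPolicy `json:"geo_match,omitempty"`
	Match    *enrichPolicy `json:"match,omitempty"`
}

func main() {
	req := putPolicyRequest{
		Match: &enrichPolicy{
			Indices:      []string{"users"},
			MatchField:   "email",
			EnrichFields: []string{"first_name", "last_name"},
		},
	}
	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	// Only the "match" flavour appears; geo_match is omitted by omitempty.
	fmt.Println(string(body))
}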
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putpolicy // Response holds the response body struct for the package putpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/put_policy/PutEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/put_policy/PutEnrichPolicyResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/enrich/stats/response.go b/typedapi/enrich/stats/response.go old mode 100755 new mode 100644 index 8757fb0ad6..319367c2c6 --- a/typedapi/enrich/stats/response.go +++ b/typedapi/enrich/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/stats/EnrichStatsResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/stats/EnrichStatsResponse.ts#L22-L29 type Response struct { CacheStats []types.CacheStats `json:"cache_stats,omitempty"` diff --git a/typedapi/enrich/stats/stats.go b/typedapi/enrich/stats/stats.go old mode 100755 new mode 100644 index a9dad714f3..2afe6e00b9 --- a/typedapi/enrich/stats/stats.go +++ b/typedapi/enrich/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets enrich coordinator statistics and information about enrich policies that // are currently executing. @@ -161,7 +161,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/eql/delete/delete.go b/typedapi/eql/delete/delete.go old mode 100755 new mode 100644 index 5f2d2b5abf..cc2f9565ca --- a/typedapi/eql/delete/delete.go +++ b/typedapi/eql/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an async EQL search by ID. If the search is still running, the search // request will be cancelled. Otherwise, the saved search results are deleted. 
@@ -172,7 +172,6 @@ func (r Delete) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/eql/delete/response.go b/typedapi/eql/delete/response.go old mode 100755 new mode 100644 index 51e8c1a2f5..c7dc8f0681 --- a/typedapi/eql/delete/response.go +++ b/typedapi/eql/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/delete/EqlDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/delete/EqlDeleteResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/eql/get/get.go b/typedapi/eql/get/get.go old mode 100755 new mode 100644 index 2a0bfc2acb..b2bb849b13 --- a/typedapi/eql/get/get.go +++ b/typedapi/eql/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns async results from previously executed Event Query Language (EQL) // search @@ -172,7 +172,6 @@ func (r Get) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/eql/get/response.go b/typedapi/eql/get/response.go old mode 100755 new mode 100644 index 4110dd2a4d..6d66e12413 --- a/typedapi/eql/get/response.go +++ b/typedapi/eql/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/get/EqlGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/get/EqlGetResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/eql/getstatus/get_status.go b/typedapi/eql/getstatus/get_status.go old mode 100755 new mode 100644 index 29ffde9c99..f12e4a9a31 --- a/typedapi/eql/getstatus/get_status.go +++ b/typedapi/eql/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the status of a previously submitted async or stored Event Query // Language (EQL) search @@ -174,7 +174,6 @@ func (r GetStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/eql/getstatus/response.go b/typedapi/eql/getstatus/response.go old mode 100755 new mode 100644 index 7a689c07ee..dff7295448 --- a/typedapi/eql/getstatus/response.go +++ b/typedapi/eql/getstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getstatus // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/get_status/EqlGetStatusResponse.ts#L24-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/get_status/EqlGetStatusResponse.ts#L24-L51 type Response struct { diff --git a/typedapi/eql/search/request.go b/typedapi/eql/search/request.go old mode 100755 new mode 100644 index cb60d03eb5..2f9c355863 --- a/typedapi/eql/search/request.go +++ b/typedapi/eql/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package search @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/search/EqlSearchRequest.ts#L28-L115 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/search/EqlSearchRequest.ts#L28-L115 type Request struct { CaseSensitive *bool `json:"case_sensitive,omitempty"` // EventCategoryField Field containing the event classification, such as process, file, or network. @@ -48,7 +48,7 @@ type Request struct { // Query EQL query you wish to run. Query string `json:"query"` ResultPosition *resultposition.ResultPosition `json:"result_position,omitempty"` - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // Size For basic queries, the maximum number of matching events to return. Defaults // to 10 Size *uint `json:"size,omitempty"` diff --git a/typedapi/eql/search/response.go b/typedapi/eql/search/response.go old mode 100755 new mode 100644 index ca13629c96..dd145a03f4 --- a/typedapi/eql/search/response.go +++ b/typedapi/eql/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
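Aside: in the eql search Request above (and again below for fleet search), RuntimeMappings changes from map[string]types.RuntimeField to the named types.RuntimeFields. Assuming RuntimeFields is declared as a map type over RuntimeField, which the former field type suggests, existing map literals keep compiling; a sketch with stand-in types:

package main

import "fmt"

// Stand-ins for the real types in typedapi/types.
type runtimeField struct {
	Type string `json:"type"`
}

// runtimeFields models the assumed shape of types.RuntimeFields.
type runtimeFields map[string]runtimeField

type eqlRequest struct {
	Query           string        `json:"query"`
	RuntimeMappings runtimeFields `json:"runtime_mappings,omitempty"`
}

func main() {
	req := eqlRequest{
		Query: `process where process.name == "cmd.exe"`,
		// A map literal still satisfies the named map type.
		RuntimeMappings: runtimeFields{
			"day_of_week": {Type: "keyword"},
		},
	}
	fmt.Printf("%+v\n", req)
}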
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package search @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/search/EqlSearchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/search/EqlSearchResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/eql/search/search.go b/typedapi/eql/search/search.go old mode 100755 new mode 100644 index 999dd929f7..83a079080f --- a/typedapi/eql/search/search.go +++ b/typedapi/eql/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns results matching a query expressed in Event Query Language (EQL) package search @@ -206,7 +206,6 @@ func (r Search) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/features/getfeatures/get_features.go b/typedapi/features/getfeatures/get_features.go old mode 100755 new mode 100644 index 71497219fa..207d393b6c --- a/typedapi/features/getfeatures/get_features.go +++ b/typedapi/features/getfeatures/get_features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets a list of features which can be included in snapshots using the // feature_states field when creating a snapshot @@ -159,7 +159,6 @@ func (r GetFeatures) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/features/getfeatures/response.go b/typedapi/features/getfeatures/response.go old mode 100755 new mode 100644 index 7dfddcf629..6862e87b0e --- a/typedapi/features/getfeatures/response.go +++ b/typedapi/features/getfeatures/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getfeatures @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfeatures // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/features/get_features/GetFeaturesResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/features/get_features/GetFeaturesResponse.ts#L22-L26 type Response struct { Features []types.Feature `json:"features"` diff --git a/typedapi/features/resetfeatures/reset_features.go b/typedapi/features/resetfeatures/reset_features.go old mode 100755 new mode 100644 index e65b05c56a..beed9caffb --- a/typedapi/features/resetfeatures/reset_features.go +++ b/typedapi/features/resetfeatures/reset_features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Resets the internal state of features, usually by deleting system indices package resetfeatures @@ -159,7 +159,6 @@ func (r ResetFeatures) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/features/resetfeatures/response.go b/typedapi/features/resetfeatures/response.go old mode 100755 new mode 100644 index 4db6379de0..daebd4fe6b --- a/typedapi/features/resetfeatures/response.go +++ b/typedapi/features/resetfeatures/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resetfeatures @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resetfeatures // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/features/reset_features/ResetFeaturesResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/features/reset_features/ResetFeaturesResponse.ts#L22-L26 type Response struct { Features []types.Feature `json:"features"` diff --git a/typedapi/fleet/globalcheckpoints/global_checkpoints.go b/typedapi/fleet/globalcheckpoints/global_checkpoints.go old mode 100755 new mode 100644 index b35983b2c0..ac2c2cac46 --- a/typedapi/fleet/globalcheckpoints/global_checkpoints.go +++ b/typedapi/fleet/globalcheckpoints/global_checkpoints.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the current global checkpoints for an index. This API is design for // internal use by the fleet server project. 
@@ -179,7 +179,6 @@ func (r GlobalCheckpoints) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/fleet/globalcheckpoints/response.go b/typedapi/fleet/globalcheckpoints/response.go old mode 100755 new mode 100644 index e680d7073e..857b278c29 --- a/typedapi/fleet/globalcheckpoints/response.go +++ b/typedapi/fleet/globalcheckpoints/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package globalcheckpoints // Response holds the response body struct for the package globalcheckpoints // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/fleet/global_checkpoints/GlobalCheckpointsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/fleet/global_checkpoints/GlobalCheckpointsResponse.ts#L22-L27 type Response struct { GlobalCheckpoints []int64 `json:"global_checkpoints"` diff --git a/typedapi/fleet/search/request.go b/typedapi/fleet/search/request.go old mode 100755 new mode 100644 index e892c31e10..05af86d91c --- a/typedapi/fleet/search/request.go +++ b/typedapi/fleet/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package search @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/fleet/search/SearchRequest.ts#L55-L260 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/fleet/search/SearchRequest.ts#L55-L260 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -65,7 +65,7 @@ type Request struct { Rescore []types.Rescore `json:"rescore,omitempty"` // RuntimeMappings Defines one or more runtime fields in the search request. These fields take // precedence over mapped fields with the same name. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // ScriptFields Retrieve a script evaluation (based on different fields) for each hit. ScriptFields map[string]types.ScriptField `json:"script_fields,omitempty"` SearchAfter []types.FieldValue `json:"search_after,omitempty"` diff --git a/typedapi/fleet/search/response.go b/typedapi/fleet/search/response.go old mode 100755 new mode 100644 index a78060f9a3..e2fc678161 --- a/typedapi/fleet/search/response.go +++ b/typedapi/fleet/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package search @@ -25,6 +25,7 @@ import ( "encoding/json" "errors" "io" + "strconv" "strings" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/fleet/search/SearchResponse.ts#L33-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/fleet/search/SearchResponse.ts#L33-L50 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` @@ -76,6 +77,10 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -88,415 +93,494 @@ func (s *Response) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } switch elems[0] { + case "cardinality": o := types.NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := types.NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := types.NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := types.NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := types.NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := types.NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := 
dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := types.NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := types.NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := types.NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := types.NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := types.NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := types.NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := types.NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := types.NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := types.NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := types.NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := types.NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := types.NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := types.NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := types.NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := types.NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := types.NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } 
s.Aggregations[elems[1]] = o + case "srareterms": o := types.NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := types.NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := types.NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := types.NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := types.NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := types.NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := types.NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := types.NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := types.NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := types.NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := types.NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := types.NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geotile_grid": o := types.NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohex_grid": o := types.NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := types.NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := types.NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := types.NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := types.NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := types.NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err 
!= nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := types.NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := types.NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := types.NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := types.NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := types.NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := types.NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := types.NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := types.NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := types.NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := types.NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := types.NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := types.NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := types.NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := types.NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := types.NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -523,6 +607,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 
0) + } if err := dec.Decode(&s.Fields); err != nil { return err } @@ -533,13 +620,34 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - if err := dec.Decode(&s.MaxScore); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := types.Float64(value) + s.MaxScore = &f + case float64: + f := types.Float64(v) + s.MaxScore = &f } case "num_reduce_phases": - if err := dec.Decode(&s.NumReducePhases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f } case "pit_id": @@ -563,23 +671,54 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]types.Suggest, 0) + } if err := dec.Decode(&s.Suggest); err != nil { return err } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/fleet/search/search.go b/typedapi/fleet/search/search.go old mode 100755 new mode 100644 index 5d0d898fbd..6ffcbd4d9a --- a/typedapi/fleet/search/search.go +++ b/typedapi/fleet/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Search API where the search will only be executed after specified checkpoints // are available due to a refresh. This API is designed for internal use by the @@ -216,7 +216,6 @@ func (r Search) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/graph/explore/explore.go b/typedapi/graph/explore/explore.go old mode 100755 new mode 100644 index f3a0a6f077..f7394cef28 --- a/typedapi/graph/explore/explore.go +++ b/typedapi/graph/explore/explore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Explore extracted and summarized information about the documents and terms in // an index. 
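Aside on the aggregations decoding in fleet/search/response.go above: response keys arrive in Elasticsearch's typed_keys form, "<type>#<name>", and the generated UnmarshalJSON splits on "#" to pick a concrete aggregate type (now also seeding the map lazily and decoding into &o). A minimal, self-contained sketch of that dispatch, using a hypothetical local type rather than the generated ones:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// cardinalityAggregate is a hypothetical stand-in for the generated types.CardinalityAggregate.
type cardinalityAggregate struct {
	Value int64 `json:"value"`
}

func main() {
	raw := []byte(`{"cardinality#unique_users":{"value":42}}`)

	var keyed map[string]json.RawMessage
	if err := json.Unmarshal(raw, &keyed); err != nil {
		panic(err)
	}

	aggs := make(map[string]interface{})
	for key, msg := range keyed {
		kind, name, ok := strings.Cut(key, "#") // "cardinality#unique_users" -> ("cardinality", "unique_users")
		if !ok {
			continue // no typed_keys prefix; a real decoder would handle this case too
		}
		switch kind {
		case "cardinality":
			o := &cardinalityAggregate{}
			if err := json.Unmarshal(msg, o); err != nil {
				panic(err)
			}
			aggs[name] = o // stored under the caller-chosen name, as in s.Aggregations[elems[1]] = o
		default:
			// Fallback mirrors the generated default branch: keep unknown kinds as a generic map.
			var generic map[string]interface{}
			if err := json.Unmarshal(msg, &generic); err != nil {
				panic(err)
			}
			aggs[name] = generic
		}
	}

	fmt.Printf("%+v\n", aggs["unique_users"]) // &{Value:42}
}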
@@ -207,7 +207,6 @@ func (r Explore) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/graph/explore/request.go b/typedapi/graph/explore/request.go old mode 100755 new mode 100644 index 3f8e534390..48eb8ba358 --- a/typedapi/graph/explore/request.go +++ b/typedapi/graph/explore/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explore @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explore // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/explore/GraphExploreRequest.ts#L28-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/explore/GraphExploreRequest.ts#L28-L47 type Request struct { Connections *types.Hop `json:"connections,omitempty"` Controls *types.ExploreControls `json:"controls,omitempty"` diff --git a/typedapi/graph/explore/response.go b/typedapi/graph/explore/response.go old mode 100755 new mode 100644 index a824416339..db98ab609b --- a/typedapi/graph/explore/response.go +++ b/typedapi/graph/explore/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explore @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explore // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/explore/GraphExploreResponse.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/explore/GraphExploreResponse.ts#L25-L33 type Response struct { Connections []types.Connection `json:"connections"` diff --git a/typedapi/ilm/deletelifecycle/delete_lifecycle.go b/typedapi/ilm/deletelifecycle/delete_lifecycle.go old mode 100755 new mode 100644 index 3bc8ee1fc1..6c394d547b --- a/typedapi/ilm/deletelifecycle/delete_lifecycle.go +++ b/typedapi/ilm/deletelifecycle/delete_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes the specified lifecycle policy definition. A currently used policy // cannot be deleted. @@ -172,7 +172,6 @@ func (r DeleteLifecycle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/deletelifecycle/response.go b/typedapi/ilm/deletelifecycle/response.go old mode 100755 new mode 100644 index ad77b9374e..7b3c532c51 --- a/typedapi/ilm/deletelifecycle/response.go +++ b/typedapi/ilm/deletelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletelifecycle // Response holds the response body struct for the package deletelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/delete_lifecycle/DeleteLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/delete_lifecycle/DeleteLifecycleResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ilm/explainlifecycle/explain_lifecycle.go b/typedapi/ilm/explainlifecycle/explain_lifecycle.go old mode 100755 new mode 100644 index 3fa52b1a47..536f5eeef6 --- a/typedapi/ilm/explainlifecycle/explain_lifecycle.go +++ b/typedapi/ilm/explainlifecycle/explain_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about the index's current lifecycle state, such as the // currently executing phase, action, and step. @@ -173,7 +173,6 @@ func (r ExplainLifecycle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/explainlifecycle/response.go b/typedapi/ilm/explainlifecycle/response.go old mode 100755 new mode 100644 index cbf1f89fd9..29b0802d2a --- a/typedapi/ilm/explainlifecycle/response.go +++ b/typedapi/ilm/explainlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explainlifecycle @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package explainlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/explain_lifecycle/ExplainLifecycleResponse.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/explain_lifecycle/ExplainLifecycleResponse.ts#L24-L28 type Response struct { Indices map[string]types.LifecycleExplain `json:"indices"` @@ -60,6 +60,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "indices": + if s.Indices == nil { + s.Indices = make(map[string]types.LifecycleExplain, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -83,7 +86,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Indices[key] = oo default: - if err := dec.Decode(&s.Indices); err != nil { + if err := localDec.Decode(&s.Indices); err != nil { return err } } diff --git a/typedapi/ilm/getlifecycle/get_lifecycle.go b/typedapi/ilm/getlifecycle/get_lifecycle.go old mode 100755 new mode 100644 index b700aac3ed..29478f4622 --- a/typedapi/ilm/getlifecycle/get_lifecycle.go +++ b/typedapi/ilm/getlifecycle/get_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the specified policy definition. Includes the policy version and last // modified date. @@ -177,7 +177,6 @@ func (r GetLifecycle) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/getlifecycle/response.go b/typedapi/ilm/getlifecycle/response.go old mode 100755 new mode 100644 index e83ef188f4..470b0ef21a --- a/typedapi/ilm/getlifecycle/response.go +++ b/typedapi/ilm/getlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getlifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/get_lifecycle/GetLifecycleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/get_lifecycle/GetLifecycleResponse.ts#L23-L25 type Response map[string]types.Lifecycle diff --git a/typedapi/ilm/getstatus/get_status.go b/typedapi/ilm/getstatus/get_status.go old mode 100755 new mode 100644 index 78bdd3a0d3..84d64a3236 --- a/typedapi/ilm/getstatus/get_status.go +++ b/typedapi/ilm/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
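Aside on the ilm/explain_lifecycle change above: the fallback branch now decodes from localDec, the decoder scoped to the current entry's raw message, instead of the outer dec. A minimal sketch of that per-entry dispatch pattern, with hypothetical explain types standing in for the generated types.LifecycleExplain variants:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// managedExplain and unmanagedExplain are hypothetical stand-ins for the generated
// managed/unmanaged lifecycle-explain variants.
type managedExplain struct {
	Managed bool   `json:"managed"`
	Phase   string `json:"phase"`
}

type unmanagedExplain struct {
	Managed bool `json:"managed"`
}

func main() {
	raw := []byte(`{"logs-1":{"managed":true,"phase":"hot"},"logs-2":{"managed":false}}`)

	var refs map[string]json.RawMessage
	if err := json.Unmarshal(raw, &refs); err != nil {
		panic(err)
	}

	indices := make(map[string]interface{}, len(refs))
	for key, message := range refs {
		// Peek at a discriminator, then decode the same entry with a decoder
		// scoped to its own bytes (the role localDec plays in the generated code).
		var head struct {
			Managed bool `json:"managed"`
		}
		if err := json.Unmarshal(message, &head); err != nil {
			panic(err)
		}
		localDec := json.NewDecoder(bytes.NewReader(message))
		if head.Managed {
			o := &managedExplain{}
			if err := localDec.Decode(o); err != nil {
				panic(err)
			}
			indices[key] = o
		} else {
			o := &unmanagedExplain{}
			if err := localDec.Decode(o); err != nil {
				panic(err)
			}
			indices[key] = o
		}
	}

	fmt.Printf("%+v %+v\n", indices["logs-1"], indices["logs-2"]) // &{Managed:true Phase:hot} &{Managed:false}
}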
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves the current index lifecycle management (ILM) status. package getstatus @@ -159,7 +159,6 @@ func (r GetStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/getstatus/response.go b/typedapi/ilm/getstatus/response.go old mode 100755 new mode 100644 index 09af4174e9..720f00a429 --- a/typedapi/ilm/getstatus/response.go +++ b/typedapi/ilm/getstatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getstatus @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/get_status/GetIlmStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/get_status/GetIlmStatusResponse.ts#L22-L24 type Response struct { OperationMode lifecycleoperationmode.LifecycleOperationMode `json:"operation_mode"` diff --git a/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go b/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go old mode 100755 new mode 100644 index 1f303e477f..46645f3227 --- a/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go +++ b/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Migrates the indices and ILM policies away from custom node attribute // allocation routing to data tiers routing @@ -197,7 +197,6 @@ func (r MigrateToDataTiers) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/migratetodatatiers/request.go b/typedapi/ilm/migratetodatatiers/request.go old mode 100755 new mode 100644 index e926af1361..6cc5066d31 --- a/typedapi/ilm/migratetodatatiers/request.go +++ b/typedapi/ilm/migratetodatatiers/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package migratetodatatiers @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package migratetodatatiers // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/migrate_to_data_tiers/Request.ts#L22-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/migrate_to_data_tiers/Request.ts#L22-L44 type Request struct { LegacyTemplateToDelete *string `json:"legacy_template_to_delete,omitempty"` NodeAttribute *string `json:"node_attribute,omitempty"` diff --git a/typedapi/ilm/migratetodatatiers/response.go b/typedapi/ilm/migratetodatatiers/response.go old mode 100755 new mode 100644 index 0fbce595a2..9f49cbccac --- a/typedapi/ilm/migratetodatatiers/response.go +++ b/typedapi/ilm/migratetodatatiers/response.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package migratetodatatiers +import ( + "bytes" + "encoding/json" + "errors" + "io" + "strconv" +) + // Response holds the response body struct for the package migratetodatatiers // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/migrate_to_data_tiers/Response.ts#L22-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/migrate_to_data_tiers/Response.ts#L22-L32 type Response struct { DryRun bool `json:"dry_run"` @@ -39,3 +47,80 @@ func NewResponse() *Response { r := &Response{} return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "dry_run": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DryRun = value + case bool: + s.DryRun = v + } + + case "migrated_component_templates": + if err := dec.Decode(&s.MigratedComponentTemplates); err != nil { + return err + } + + case "migrated_composable_templates": + if err := dec.Decode(&s.MigratedComposableTemplates); err != nil { + return err + } + + case "migrated_ilm_policies": + if err := dec.Decode(&s.MigratedIlmPolicies); err != nil { + return err + } + + case "migrated_indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.MigratedIndices = append(s.MigratedIndices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MigratedIndices); err != nil { + return err + } + } + + case "migrated_legacy_templates": + if err := dec.Decode(&s.MigratedLegacyTemplates); err != nil { + return err + } + + case "removed_legacy_template": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) 
+ s.RemovedLegacyTemplate = o + + } + } + return nil +} diff --git a/typedapi/ilm/movetostep/move_to_step.go b/typedapi/ilm/movetostep/move_to_step.go old mode 100755 new mode 100644 index 30e28c2498..3f3a022add --- a/typedapi/ilm/movetostep/move_to_step.go +++ b/typedapi/ilm/movetostep/move_to_step.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Manually moves an index into the specified step and executes that step. package movetostep @@ -205,7 +205,6 @@ func (r MoveToStep) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/movetostep/request.go b/typedapi/ilm/movetostep/request.go old mode 100755 new mode 100644 index dfec105ea0..350766aa18 --- a/typedapi/ilm/movetostep/request.go +++ b/typedapi/ilm/movetostep/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package movetostep @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package movetostep // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/move_to_step/MoveToStepRequest.ts#L24-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/move_to_step/MoveToStepRequest.ts#L24-L37 type Request struct { CurrentStep *types.StepKey `json:"current_step,omitempty"` NextStep *types.StepKey `json:"next_step,omitempty"` diff --git a/typedapi/ilm/movetostep/response.go b/typedapi/ilm/movetostep/response.go old mode 100755 new mode 100644 index c28f521311..39c381089a --- a/typedapi/ilm/movetostep/response.go +++ b/typedapi/ilm/movetostep/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package movetostep // Response holds the response body struct for the package movetostep // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/move_to_step/MoveToStepResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/move_to_step/MoveToStepResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ilm/putlifecycle/put_lifecycle.go b/typedapi/ilm/putlifecycle/put_lifecycle.go old mode 100755 new mode 100644 index 4f51e8b4d9..7449aef97e --- a/typedapi/ilm/putlifecycle/put_lifecycle.go +++ b/typedapi/ilm/putlifecycle/put_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
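Aside on the new migrate_to_data_tiers response decoder above: scalar fields tolerate string-encoded values (strconv.ParseBool / ParseInt style), and migrated_indices accepts either a single string or an array. A minimal stand-alone sketch of the same lenient decoding, with a simplified struct rather than the generated Response:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

type response struct {
	Took            int64
	DryRun          bool
	MigratedIndices []string
}

func (r *response) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}

	// took: accept 12 or "12".
	if msg, ok := raw["took"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string:
			n, err := strconv.ParseInt(v, 10, 64)
			if err != nil {
				return err
			}
			r.Took = n
		case float64: // plain JSON numbers land here when decoding into interface{}
			r.Took = int64(v)
		}
	}

	// dry_run: accept true or "true".
	if msg, ok := raw["dry_run"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string:
			b, err := strconv.ParseBool(v)
			if err != nil {
				return err
			}
			r.DryRun = b
		case bool:
			r.DryRun = v
		}
	}

	// migrated_indices: accept "logs-1" or ["logs-1","logs-2"].
	if msg, ok := raw["migrated_indices"]; ok {
		if bytes.HasPrefix(msg, []byte("[")) {
			if err := json.Unmarshal(msg, &r.MigratedIndices); err != nil {
				return err
			}
		} else {
			var one string
			if err := json.Unmarshal(msg, &one); err != nil {
				return err
			}
			r.MigratedIndices = append(r.MigratedIndices, one)
		}
	}
	return nil
}

func main() {
	var r response
	if err := json.Unmarshal([]byte(`{"took":"12","dry_run":"true","migrated_indices":"logs-1"}`), &r); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", r) // {Took:12 DryRun:true MigratedIndices:[logs-1]}
}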
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a lifecycle policy package putlifecycle @@ -205,7 +205,6 @@ func (r PutLifecycle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/putlifecycle/request.go b/typedapi/ilm/putlifecycle/request.go old mode 100755 new mode 100644 index 17acd52d2e..80bde3536c --- a/typedapi/ilm/putlifecycle/request.go +++ b/typedapi/ilm/putlifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putlifecycle @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/put_lifecycle/PutLifecycleRequest.ts#L25-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/put_lifecycle/PutLifecycleRequest.ts#L25-L56 type Request struct { Policy *types.IlmPolicy `json:"policy,omitempty"` } diff --git a/typedapi/ilm/putlifecycle/response.go b/typedapi/ilm/putlifecycle/response.go old mode 100755 new mode 100644 index c561bf02d1..afef96c8fc --- a/typedapi/ilm/putlifecycle/response.go +++ b/typedapi/ilm/putlifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putlifecycle // Response holds the response body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/put_lifecycle/PutLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/put_lifecycle/PutLifecycleResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ilm/removepolicy/remove_policy.go b/typedapi/ilm/removepolicy/remove_policy.go old mode 100755 new mode 100644 index fc7ddb1d46..95d9db44b0 --- a/typedapi/ilm/removepolicy/remove_policy.go +++ b/typedapi/ilm/removepolicy/remove_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes the assigned lifecycle policy and stops managing the specified index package removepolicy @@ -170,7 +170,6 @@ func (r RemovePolicy) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/removepolicy/response.go b/typedapi/ilm/removepolicy/response.go old mode 100755 new mode 100644 index 124201695a..61e2144976 --- a/typedapi/ilm/removepolicy/response.go +++ b/typedapi/ilm/removepolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package removepolicy // Response holds the response body struct for the package removepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/remove_policy/RemovePolicyResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/remove_policy/RemovePolicyResponse.ts#L22-L27 type Response struct { FailedIndexes []string `json:"failed_indexes"` diff --git a/typedapi/ilm/retry/response.go b/typedapi/ilm/retry/response.go old mode 100755 new mode 100644 index 8f1be3594f..33b7b887b6 --- a/typedapi/ilm/retry/response.go +++ b/typedapi/ilm/retry/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package retry // Response holds the response body struct for the package retry // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/retry/RetryIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/retry/RetryIlmResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ilm/retry/retry.go b/typedapi/ilm/retry/retry.go old mode 100755 new mode 100644 index 9eae3b8685..d7a8e123aa --- a/typedapi/ilm/retry/retry.go +++ b/typedapi/ilm/retry/retry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retries executing the policy for an index that is in the ERROR step. package retry @@ -170,7 +170,6 @@ func (r Retry) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/start/response.go b/typedapi/ilm/start/response.go old mode 100755 new mode 100644 index 2668df0b88..1d976c76dd --- a/typedapi/ilm/start/response.go +++ b/typedapi/ilm/start/response.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/start/StartIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/start/StartIlmResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ilm/start/start.go b/typedapi/ilm/start/start.go old mode 100755 new mode 100644 index ef3da4fe9a..003eeb9658 --- a/typedapi/ilm/start/start.go +++ b/typedapi/ilm/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Start the index lifecycle management (ILM) plugin. package start @@ -159,7 +159,6 @@ func (r Start) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ilm/stop/response.go b/typedapi/ilm/stop/response.go old mode 100755 new mode 100644 index bf63dbe8e0..4edb297daf --- a/typedapi/ilm/stop/response.go +++ b/typedapi/ilm/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/stop/StopIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/stop/StopIlmResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ilm/stop/stop.go b/typedapi/ilm/stop/stop.go old mode 100755 new mode 100644 index 801e83b8bf..95cef41878 --- a/typedapi/ilm/stop/stop.go +++ b/typedapi/ilm/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Halts all lifecycle management operations and stops the index lifecycle // management (ILM) plugin @@ -161,7 +161,6 @@ func (r Stop) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/addblock/add_block.go b/typedapi/indices/addblock/add_block.go old mode 100755 new mode 100644 index 04b7b5fa56..1c30558f1d --- a/typedapi/indices/addblock/add_block.go +++ b/typedapi/indices/addblock/add_block.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds a block to an index. package addblock @@ -177,7 +177,6 @@ func (r AddBlock) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/addblock/response.go b/typedapi/indices/addblock/response.go old mode 100755 new mode 100644 index 205811d483..8d5fd85439 --- a/typedapi/indices/addblock/response.go +++ b/typedapi/indices/addblock/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package addblock @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package addblock // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/add_block/IndicesAddBlockResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/add_block/IndicesAddBlockResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/analyze/analyze.go b/typedapi/indices/analyze/analyze.go old mode 100755 new mode 100644 index a612c39536..ec15aa2ea9 --- a/typedapi/indices/analyze/analyze.go +++ b/typedapi/indices/analyze/analyze.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Performs the analysis process on a text and return the tokens breakdown of // the text. @@ -208,7 +208,6 @@ func (r Analyze) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/analyze/request.go b/typedapi/indices/analyze/request.go old mode 100755 new mode 100644 index 0f66d54430..9cd87aedf6 --- a/typedapi/indices/analyze/request.go +++ b/typedapi/indices/analyze/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package analyze @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package analyze // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/IndicesAnalyzeRequest.ts#L27-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/IndicesAnalyzeRequest.ts#L27-L47 type Request struct { Analyzer *string `json:"analyzer,omitempty"` Attributes []string `json:"attributes,omitempty"` diff --git a/typedapi/indices/analyze/response.go b/typedapi/indices/analyze/response.go old mode 100755 new mode 100644 index 8e4ab9e596..3321df25a1 --- a/typedapi/indices/analyze/response.go +++ b/typedapi/indices/analyze/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package analyze @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package analyze // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/IndicesAnalyzeResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/IndicesAnalyzeResponse.ts#L22-L27 type Response struct { Detail *types.AnalyzeDetail `json:"detail,omitempty"` diff --git a/typedapi/indices/clearcache/clear_cache.go b/typedapi/indices/clearcache/clear_cache.go old mode 100755 new mode 100644 index 041b2f7943..4afded49db --- a/typedapi/indices/clearcache/clear_cache.go +++ b/typedapi/indices/clearcache/clear_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clears all or specific caches for one or more indices. package clearcache @@ -176,7 +176,6 @@ func (r ClearCache) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/clearcache/response.go b/typedapi/indices/clearcache/response.go old mode 100755 new mode 100644 index 3927601b28..f284c83f11 --- a/typedapi/indices/clearcache/response.go +++ b/typedapi/indices/clearcache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcache // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/clear_cache/IndicesClearCacheResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/clear_cache/IndicesClearCacheResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/clone/clone.go b/typedapi/indices/clone/clone.go old mode 100755 new mode 100644 index 5dc0069a2d..001cde2763 --- a/typedapi/indices/clone/clone.go +++ b/typedapi/indices/clone/clone.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clones an index package clone @@ -211,7 +211,6 @@ func (r Clone) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/clone/request.go b/typedapi/indices/clone/request.go old mode 100755 new mode 100644 index 189f5284df..41beeb1938 --- a/typedapi/indices/clone/request.go +++ b/typedapi/indices/clone/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clone @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/clone/IndicesCloneRequest.ts#L27-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/clone/IndicesCloneRequest.ts#L27-L46 type Request struct { Aliases map[string]types.Alias `json:"aliases,omitempty"` Settings map[string]json.RawMessage `json:"settings,omitempty"` diff --git a/typedapi/indices/clone/response.go b/typedapi/indices/clone/response.go old mode 100755 new mode 100644 index c54145a640..3b5b74575d --- a/typedapi/indices/clone/response.go +++ b/typedapi/indices/clone/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clone // Response holds the response body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/clone/IndicesCloneResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/clone/IndicesCloneResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/close/close.go b/typedapi/indices/close/close.go old mode 100755 new mode 100644 index d94ae45cf8..039ac4d258 --- a/typedapi/indices/close/close.go +++ b/typedapi/indices/close/close.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Closes an index. package close @@ -169,7 +169,6 @@ func (r Close) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/close/response.go b/typedapi/indices/close/response.go old mode 100755 new mode 100644 index 8db61b08fe..7c60151845 --- a/typedapi/indices/close/response.go +++ b/typedapi/indices/close/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package close @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package close // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/close/CloseIndexResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/close/CloseIndexResponse.ts#L24-L30 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/create/create.go b/typedapi/indices/create/create.go old mode 100755 new mode 100644 index 28c11a8071..8eb0562620 --- a/typedapi/indices/create/create.go +++ b/typedapi/indices/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates an index with optional settings and mappings. package create @@ -201,7 +201,6 @@ func (r Create) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/create/request.go b/typedapi/indices/create/request.go old mode 100755 new mode 100644 index 7125794e72..4fde5c97ca --- a/typedapi/indices/create/request.go +++ b/typedapi/indices/create/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package create @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/create/IndicesCreateRequest.ts#L28-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/create/IndicesCreateRequest.ts#L28-L56 type Request struct { Aliases map[string]types.Alias `json:"aliases,omitempty"` // Mappings Mapping for fields in the index. If specified, this mapping can include: diff --git a/typedapi/indices/create/response.go b/typedapi/indices/create/response.go old mode 100755 new mode 100644 index 7d3b4bf66a..f755d00d19 --- a/typedapi/indices/create/response.go +++ b/typedapi/indices/create/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package create // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/create/IndicesCreateResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/create/IndicesCreateResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/createdatastream/create_data_stream.go b/typedapi/indices/createdatastream/create_data_stream.go old mode 100755 new mode 100644 index 53933e2ed0..af5b460b10 --- a/typedapi/indices/createdatastream/create_data_stream.go +++ b/typedapi/indices/createdatastream/create_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a data stream package createdatastream @@ -168,7 +168,6 @@ func (r CreateDataStream) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/createdatastream/response.go b/typedapi/indices/createdatastream/response.go old mode 100755 new mode 100644 index a14f10d634..45f15faa91 --- a/typedapi/indices/createdatastream/response.go +++ b/typedapi/indices/createdatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package createdatastream // Response holds the response body struct for the package createdatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/create_data_stream/IndicesCreateDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/create_data_stream/IndicesCreateDataStreamResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/datastreamsstats/data_streams_stats.go b/typedapi/indices/datastreamsstats/data_streams_stats.go old mode 100755 new mode 100644 index 772fe6195f..937298bdd4 --- a/typedapi/indices/datastreamsstats/data_streams_stats.go +++ b/typedapi/indices/datastreamsstats/data_streams_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides statistics on operations happening in a data stream. package datastreamsstats @@ -175,7 +175,6 @@ func (r DataStreamsStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/datastreamsstats/response.go b/typedapi/indices/datastreamsstats/response.go old mode 100755 new mode 100644 index a09b1448ff..01b9584fab --- a/typedapi/indices/datastreamsstats/response.go +++ b/typedapi/indices/datastreamsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package datastreamsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package datastreamsstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L25-L34 type Response struct { BackingIndices int `json:"backing_indices"` diff --git a/typedapi/indices/delete/delete.go b/typedapi/indices/delete/delete.go old mode 100755 new mode 100644 index 4f4d279349..1e77252388 --- a/typedapi/indices/delete/delete.go +++ b/typedapi/indices/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an index. 
package delete @@ -167,7 +167,6 @@ func (r Delete) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/delete/response.go b/typedapi/indices/delete/response.go old mode 100755 new mode 100644 index 6f3b10447f..7a175de00c --- a/typedapi/indices/delete/response.go +++ b/typedapi/indices/delete/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package delete @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/delete/IndicesDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/delete/IndicesDeleteResponse.ts#L22-L24 type Response struct { Shards_ *types.ShardStatistics `json:"_shards,omitempty"` diff --git a/typedapi/indices/deletealias/delete_alias.go b/typedapi/indices/deletealias/delete_alias.go old mode 100755 new mode 100644 index 11973f8568..5fd75a8389 --- a/typedapi/indices/deletealias/delete_alias.go +++ b/typedapi/indices/deletealias/delete_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an alias. package deletealias @@ -187,7 +187,6 @@ func (r DeleteAlias) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/deletealias/response.go b/typedapi/indices/deletealias/response.go old mode 100755 new mode 100644 index e4687384f7..6b4d5f5739 --- a/typedapi/indices/deletealias/response.go +++ b/typedapi/indices/deletealias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletealias // Response holds the response body struct for the package deletealias // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/delete_alias/IndicesDeleteAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/delete_alias/IndicesDeleteAliasResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/deletedatastream/delete_data_stream.go b/typedapi/indices/deletedatastream/delete_data_stream.go old mode 100755 new mode 100644 index 98d1e08ef7..0fbd0292f1 --- a/typedapi/indices/deletedatastream/delete_data_stream.go +++ b/typedapi/indices/deletedatastream/delete_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
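
A companion sketch for the delete endpoint above, reusing a typed client like the one constructed in the create example; the index name is a placeholder. The errors.As branch illustrates the *types.ElasticsearchError value that the generated Do methods return on non-2xx responses, as shown in the hunks above.

// Assumed imports: "context", "errors", "log",
// "github.com/elastic/go-elasticsearch/v8",
// "github.com/elastic/go-elasticsearch/v8/typedapi/types".
func deleteIndex(ctx context.Context, es *elasticsearch.TypedClient, name string) error {
	res, err := es.Indices.Delete(name).Do(ctx)
	if err != nil {
		var esErr *types.ElasticsearchError
		if errors.As(err, &esErr) {
			// Non-2xx bodies are decoded into a typed Elasticsearch error.
			log.Printf("delete failed with status %d", esErr.Status)
		}
		return err
	}
	log.Printf("delete acknowledged: %v", res.Acknowledged)
	return nil
}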
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a data stream. package deletedatastream @@ -168,7 +168,6 @@ func (r DeleteDataStream) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/deletedatastream/response.go b/typedapi/indices/deletedatastream/response.go old mode 100755 new mode 100644 index 16235f837c..d4ecda2ae5 --- a/typedapi/indices/deletedatastream/response.go +++ b/typedapi/indices/deletedatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletedatastream // Response holds the response body struct for the package deletedatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/delete_data_stream/IndicesDeleteDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/delete_data_stream/IndicesDeleteDataStreamResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/deleteindextemplate/delete_index_template.go b/typedapi/indices/deleteindextemplate/delete_index_template.go old mode 100755 new mode 100644 index 125de3e5f1..22015f726a --- a/typedapi/indices/deleteindextemplate/delete_index_template.go +++ b/typedapi/indices/deleteindextemplate/delete_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an index template. package deleteindextemplate @@ -168,7 +168,6 @@ func (r DeleteIndexTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/deleteindextemplate/response.go b/typedapi/indices/deleteindextemplate/response.go old mode 100755 new mode 100644 index f7a1761bd4..146ab50e0c --- a/typedapi/indices/deleteindextemplate/response.go +++ b/typedapi/indices/deleteindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteindextemplate // Response holds the response body struct for the package deleteindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/delete_index_template/IndicesDeleteIndexTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/delete_index_template/IndicesDeleteIndexTemplateResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/deletetemplate/delete_template.go b/typedapi/indices/deletetemplate/delete_template.go old mode 100755 new mode 100644 index 45900a3169..a3676fbba0 --- a/typedapi/indices/deletetemplate/delete_template.go +++ b/typedapi/indices/deletetemplate/delete_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an index template. package deletetemplate @@ -168,7 +168,6 @@ func (r DeleteTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/deletetemplate/response.go b/typedapi/indices/deletetemplate/response.go old mode 100755 new mode 100644 index 0e964bedc6..a3e8e05fef --- a/typedapi/indices/deletetemplate/response.go +++ b/typedapi/indices/deletetemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletetemplate // Response holds the response body struct for the package deletetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/delete_template/IndicesDeleteTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/delete_template/IndicesDeleteTemplateResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/diskusage/disk_usage.go b/typedapi/indices/diskusage/disk_usage.go old mode 100755 new mode 100644 index 9bfe8b6c15..1fb1290e69 --- a/typedapi/indices/diskusage/disk_usage.go +++ b/typedapi/indices/diskusage/disk_usage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Analyzes the disk usage of each field of an index or data stream package diskusage @@ -169,7 +169,6 @@ func (r DiskUsage) Do(ctx context.Context) (Response, error) { } return *response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/diskusage/response.go b/typedapi/indices/diskusage/response.go old mode 100755 new mode 100644 index cdf480b447..45c2297efc --- a/typedapi/indices/diskusage/response.go +++ b/typedapi/indices/diskusage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package diskusage @@ -24,6 +24,10 @@ import "encoding/json" // Response holds the response body struct for the package diskusage // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/disk_usage/IndicesDiskUsageResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/disk_usage/IndicesDiskUsageResponse.ts#L22-L24 -type Response json.RawMessage +type Response = json.RawMessage + +func NewResponse() *Response { + return new(Response) +} diff --git a/typedapi/indices/downsample/downsample.go b/typedapi/indices/downsample/downsample.go old mode 100755 new mode 100644 index 9188b9386e..c51b4e7ff2 --- a/typedapi/indices/downsample/downsample.go +++ b/typedapi/indices/downsample/downsample.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Downsample an index package downsample @@ -211,7 +211,6 @@ func (r Downsample) Do(ctx context.Context) (Response, error) { } return *response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/downsample/response.go b/typedapi/indices/downsample/response.go old mode 100755 new mode 100644 index e129163988..5cde19642f --- a/typedapi/indices/downsample/response.go +++ b/typedapi/indices/downsample/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package downsample @@ -24,6 +24,10 @@ import "encoding/json" // Response holds the response body struct for the package downsample // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/downsample/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/downsample/Response.ts#L22-L24 -type Response json.RawMessage +type Response = json.RawMessage + +func NewResponse() *Response { + return new(Response) +} diff --git a/typedapi/indices/exists/exists.go b/typedapi/indices/exists/exists.go old mode 100755 new mode 100644 index 80ca01d448..22a8082f25 --- a/typedapi/indices/exists/exists.go +++ b/typedapi/indices/exists/exists.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a particular index exists. package exists @@ -24,7 +24,6 @@ package exists import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) const ( @@ -149,36 +147,6 @@ func (r Exists) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a exists.Response -func (r Exists) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r Exists) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/indices/existsalias/exists_alias.go b/typedapi/indices/existsalias/exists_alias.go old mode 100755 new mode 100644 index 2b468fc65f..bbe5a267d3 --- a/typedapi/indices/existsalias/exists_alias.go +++ b/typedapi/indices/existsalias/exists_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a particular alias exists. 
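
The disk_usage and downsample responses above (and promote_data_stream later in this diff) now alias json.RawMessage, so callers decode the body themselves. A sketch of that, assuming the same hypothetical client and placeholder index name as the earlier examples:

// Assumed imports: "context", "encoding/json", "fmt",
// "github.com/elastic/go-elasticsearch/v8".
func indexDiskUsage(ctx context.Context, es *elasticsearch.TypedClient, name string) (map[string]interface{}, error) {
	// Do returns the raw body (Response = json.RawMessage) on success.
	raw, err := es.Indices.DiskUsage(name).Do(ctx)
	if err != nil {
		return nil, err
	}
	var body map[string]interface{}
	if err := json.Unmarshal(raw, &body); err != nil {
		return nil, fmt.Errorf("decode disk usage: %w", err)
	}
	return body, nil
}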
package existsalias @@ -24,7 +24,6 @@ package existsalias import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) const ( @@ -165,36 +163,6 @@ func (r ExistsAlias) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a existsalias.Response -func (r ExistsAlias) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r ExistsAlias) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/indices/existsalias/response.go b/typedapi/indices/existsalias/response.go deleted file mode 100755 index 430f8534c6..0000000000 --- a/typedapi/indices/existsalias/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package existsalias - -// Response holds the response body struct for the package existsalias -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/exists_alias/IndicesExistsAliasResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/indices/existsindextemplate/exists_index_template.go b/typedapi/indices/existsindextemplate/exists_index_template.go old mode 100755 new mode 100644 index 0c0a48973b..338b3f7b1e --- a/typedapi/indices/existsindextemplate/exists_index_template.go +++ b/typedapi/indices/existsindextemplate/exists_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a particular index template exists. package existsindextemplate @@ -24,7 +24,6 @@ package existsindextemplate import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -34,7 +33,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) const ( @@ -150,36 +148,6 @@ func (r ExistsIndexTemplate) Perform(ctx context.Context) (*http.Response, error return res, nil } -// Do runs the request through the transport, handle the response and returns a existsindextemplate.Response -func (r ExistsIndexTemplate) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r ExistsIndexTemplate) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/indices/existsindextemplate/response.go b/typedapi/indices/existsindextemplate/response.go deleted file mode 100755 index 559593b635..0000000000 --- a/typedapi/indices/existsindextemplate/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. 
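
With the Do methods removed from the exists-style endpoints above, existence checks go through IsSuccess (or Perform for the raw *http.Response). A sketch, again assuming a typed client as in the earlier examples and placeholder names:

// Assumed imports: "context", "github.com/elastic/go-elasticsearch/v8".
func indexExists(ctx context.Context, es *elasticsearch.TypedClient, name string) (bool, error) {
	// HEAD-style endpoint: a 2xx response means the index exists.
	return es.Indices.Exists(name).IsSuccess(ctx)
}

func aliasExists(ctx context.Context, es *elasticsearch.TypedClient, alias string) (bool, error) {
	return es.Indices.ExistsAlias(alias).IsSuccess(ctx)
}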
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package existsindextemplate - -// Response holds the response body struct for the package existsindextemplate -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/exists_index_template/IndicesExistsIndexTemplateResponse.ts#L22-L29 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/indices/existstemplate/exists_template.go b/typedapi/indices/existstemplate/exists_template.go old mode 100755 new mode 100644 index 9d9c2a0ef6..9ee6b37784 --- a/typedapi/indices/existstemplate/exists_template.go +++ b/typedapi/indices/existstemplate/exists_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about whether a particular index template exists. package existstemplate @@ -24,7 +24,6 @@ package existstemplate import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -35,7 +34,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) const ( @@ -151,36 +149,6 @@ func (r ExistsTemplate) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a existstemplate.Response -func (r ExistsTemplate) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. func (r ExistsTemplate) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/indices/existstemplate/response.go b/typedapi/indices/existstemplate/response.go deleted file mode 100755 index f4bfc19ae2..0000000000 --- a/typedapi/indices/existstemplate/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package existstemplate - -// Response holds the response body struct for the package existstemplate -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/exists_template/IndicesExistsTemplateResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/indices/fieldusagestats/field_usage_stats.go b/typedapi/indices/fieldusagestats/field_usage_stats.go old mode 100755 new mode 100644 index 483fb53255..6c6f5379fe --- a/typedapi/indices/fieldusagestats/field_usage_stats.go +++ b/typedapi/indices/fieldusagestats/field_usage_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the field usage stats for each field of an index package fieldusagestats @@ -169,7 +169,6 @@ func (r FieldUsageStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/fieldusagestats/response.go b/typedapi/indices/fieldusagestats/response.go old mode 100755 new mode 100644 index 5565d39927..f004a5e8e9 --- a/typedapi/indices/fieldusagestats/response.go +++ b/typedapi/indices/fieldusagestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package fieldusagestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package fieldusagestats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L28-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L28-L30 type Response struct { FieldsUsageBody map[string]types.UsageStatsIndex `json:"-"` diff --git a/typedapi/indices/flush/flush.go b/typedapi/indices/flush/flush.go old mode 100755 new mode 100644 index c5c8828fe3..1b29927279 --- a/typedapi/indices/flush/flush.go +++ b/typedapi/indices/flush/flush.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Performs the flush operation on one or more indices. 
package flush @@ -172,7 +172,6 @@ func (r Flush) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/flush/response.go b/typedapi/indices/flush/response.go old mode 100755 new mode 100644 index 24d1f7f794..7b3b7291c7 --- a/typedapi/indices/flush/response.go +++ b/typedapi/indices/flush/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package flush @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package flush // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/flush/IndicesFlushResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/flush/IndicesFlushResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/forcemerge/forcemerge.go b/typedapi/indices/forcemerge/forcemerge.go old mode 100755 new mode 100644 index 172f675356..8bd9caf7b1 --- a/typedapi/indices/forcemerge/forcemerge.go +++ b/typedapi/indices/forcemerge/forcemerge.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Performs the force merge operation on one or more indices. package forcemerge @@ -172,7 +172,6 @@ func (r Forcemerge) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/forcemerge/response.go b/typedapi/indices/forcemerge/response.go old mode 100755 new mode 100644 index 3b72d3eb3a..ed1bb672e5 --- a/typedapi/indices/forcemerge/response.go +++ b/typedapi/indices/forcemerge/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package forcemerge // Response holds the response body struct for the package forcemerge // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/forcemerge/IndicesForceMergeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/forcemerge/IndicesForceMergeResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/get/get.go b/typedapi/indices/get/get.go old mode 100755 new mode 100644 index 976def3ce1..82e2284634 --- a/typedapi/indices/get/get.go +++ b/typedapi/indices/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about one or more indices. package get @@ -167,7 +167,6 @@ func (r Get) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/get/response.go b/typedapi/indices/get/response.go old mode 100755 new mode 100644 index 3b3a58b804..bcd33cb9b2 --- a/typedapi/indices/get/response.go +++ b/typedapi/indices/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get/IndicesGetResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get/IndicesGetResponse.ts#L24-L26 type Response map[string]types.IndexState diff --git a/typedapi/indices/getalias/get_alias.go b/typedapi/indices/getalias/get_alias.go old mode 100755 new mode 100644 index 7987522ee7..156b38497e --- a/typedapi/indices/getalias/get_alias.go +++ b/typedapi/indices/getalias/get_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns an alias. package getalias @@ -187,14 +187,13 @@ func (r GetAlias) Do(ctx context.Context) (Response, error) { } defer res.Body.Close() - if res.StatusCode < 299 { + if res.StatusCode < 299 || res.StatusCode == 404 { err = json.NewDecoder(res.Body).Decode(&response) if err != nil { return nil, err } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/getalias/response.go b/typedapi/indices/getalias/response.go old mode 100755 new mode 100644 index b3bdb0c369..6cee3b81ff --- a/typedapi/indices/getalias/response.go +++ b/typedapi/indices/getalias/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
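
The get_alias change above makes Do decode 404 bodies as a Response instead of returning a typed error, so a missing alias no longer surfaces as *types.ElasticsearchError. A sketch of reading the result, with a placeholder alias name, the client assumed throughout, and the generated Name setter for the optional path parameter:

// Assumed imports: "context", "log", "github.com/elastic/go-elasticsearch/v8".
func listAlias(ctx context.Context, es *elasticsearch.TypedClient, alias string) error {
	// Response maps index names to their alias definitions; after this change
	// a 404 body is decoded into the same map rather than treated as an error.
	res, err := es.Indices.GetAlias().Name(alias).Do(ctx)
	if err != nil {
		return err
	}
	for index, info := range res {
		log.Printf("%s has %d alias(es)", index, len(info.Aliases))
	}
	return nil
}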
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getalias @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getalias // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_alias/IndicesGetAliasResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_alias/IndicesGetAliasResponse.ts#L26-L34 type Response map[string]types.IndexAliases diff --git a/typedapi/indices/getdatastream/get_data_stream.go b/typedapi/indices/getdatastream/get_data_stream.go old mode 100755 new mode 100644 index 125c4ef409..9305dacbe8 --- a/typedapi/indices/getdatastream/get_data_stream.go +++ b/typedapi/indices/getdatastream/get_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns data streams. package getdatastream @@ -171,7 +171,6 @@ func (r GetDataStream) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/getdatastream/response.go b/typedapi/indices/getdatastream/response.go old mode 100755 new mode 100644 index 0fab685aff..3356d797f8 --- a/typedapi/indices/getdatastream/response.go +++ b/typedapi/indices/getdatastream/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getdatastream @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_data_stream/IndicesGetDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_data_stream/IndicesGetDataStreamResponse.ts#L22-L24 type Response struct { DataStreams []types.DataStream `json:"data_streams"` diff --git a/typedapi/indices/getfieldmapping/get_field_mapping.go b/typedapi/indices/getfieldmapping/get_field_mapping.go old mode 100755 new mode 100644 index 328949ff64..7bc0dcec21 --- a/typedapi/indices/getfieldmapping/get_field_mapping.go +++ b/typedapi/indices/getfieldmapping/get_field_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns mapping for one or more fields. 
package getfieldmapping @@ -187,7 +187,6 @@ func (r GetFieldMapping) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/getfieldmapping/response.go b/typedapi/indices/getfieldmapping/response.go old mode 100755 new mode 100644 index 8744240af6..a8a306fcd9 --- a/typedapi/indices/getfieldmapping/response.go +++ b/typedapi/indices/getfieldmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getfieldmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfieldmapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_field_mapping/IndicesGetFieldMappingResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_field_mapping/IndicesGetFieldMappingResponse.ts#L24-L26 type Response map[string]types.TypeFieldMappings diff --git a/typedapi/indices/getindextemplate/get_index_template.go b/typedapi/indices/getindextemplate/get_index_template.go old mode 100755 new mode 100644 index 79f246b62e..5094d0ea55 --- a/typedapi/indices/getindextemplate/get_index_template.go +++ b/typedapi/indices/getindextemplate/get_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns an index template. package getindextemplate @@ -172,7 +172,6 @@ func (r GetIndexTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/getindextemplate/response.go b/typedapi/indices/getindextemplate/response.go old mode 100755 new mode 100644 index 4775cab1a5..c8e2af5601 --- a/typedapi/indices/getindextemplate/response.go +++ b/typedapi/indices/getindextemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getindextemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L23-L27 type Response struct { IndexTemplates []types.IndexTemplateItem `json:"index_templates"` diff --git a/typedapi/indices/getmapping/get_mapping.go b/typedapi/indices/getmapping/get_mapping.go old mode 100755 new mode 100644 index 169e15dc6a..cc64030420 --- a/typedapi/indices/getmapping/get_mapping.go +++ b/typedapi/indices/getmapping/get_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns mappings for one or more indices. package getmapping @@ -172,7 +172,6 @@ func (r GetMapping) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/getmapping/response.go b/typedapi/indices/getmapping/response.go old mode 100755 new mode 100644 index 9e827f51e8..7dc109f444 --- a/typedapi/indices/getmapping/response.go +++ b/typedapi/indices/getmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L24-L26 type Response map[string]types.IndexMappingRecord diff --git a/typedapi/indices/getsettings/get_settings.go b/typedapi/indices/getsettings/get_settings.go old mode 100755 new mode 100644 index 38c5ce6953..0be9bd198f --- a/typedapi/indices/getsettings/get_settings.go +++ b/typedapi/indices/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns settings for one or more indices. 
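
get_mapping, get_settings, and get_template above all decode into map-typed responses keyed by index or template name. A short sketch for settings, assuming the generated Index setter for the optional path parameter and the same placeholder client and index name:

// Assumed imports: "context", "log", "github.com/elastic/go-elasticsearch/v8".
func printSettingsKeys(ctx context.Context, es *elasticsearch.TypedClient, name string) error {
	// Response is map[string]types.IndexState, one entry per concrete index.
	res, err := es.Indices.GetSettings().Index(name).Do(ctx)
	if err != nil {
		return err
	}
	for index := range res {
		log.Printf("settings returned for %s", index)
	}
	return nil
}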
package getsettings @@ -194,7 +194,6 @@ func (r GetSettings) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/getsettings/response.go b/typedapi/indices/getsettings/response.go old mode 100755 new mode 100644 index 5eaf38c846..88e4f2a69b --- a/typedapi/indices/getsettings/response.go +++ b/typedapi/indices/getsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_settings/IndicesGetSettingsResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_settings/IndicesGetSettingsResponse.ts#L24-L26 type Response map[string]types.IndexState diff --git a/typedapi/indices/gettemplate/get_template.go b/typedapi/indices/gettemplate/get_template.go old mode 100755 new mode 100644 index 99b3c218c2..7b6a26be52 --- a/typedapi/indices/gettemplate/get_template.go +++ b/typedapi/indices/gettemplate/get_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns an index template. package gettemplate @@ -172,7 +172,6 @@ func (r GetTemplate) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/gettemplate/response.go b/typedapi/indices/gettemplate/response.go old mode 100755 new mode 100644 index 2d460c3a80..91475ea16f --- a/typedapi/indices/gettemplate/response.go +++ b/typedapi/indices/gettemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_template/IndicesGetTemplateResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_template/IndicesGetTemplateResponse.ts#L23-L25 type Response map[string]types.TemplateMapping diff --git a/typedapi/indices/migratetodatastream/migrate_to_data_stream.go b/typedapi/indices/migratetodatastream/migrate_to_data_stream.go old mode 100755 new mode 100644 index b348231136..7638a42242 --- a/typedapi/indices/migratetodatastream/migrate_to_data_stream.go +++ b/typedapi/indices/migratetodatastream/migrate_to_data_stream.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Migrates an alias to a data stream package migratetodatastream @@ -170,7 +170,6 @@ func (r MigrateToDataStream) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/migratetodatastream/response.go b/typedapi/indices/migratetodatastream/response.go old mode 100755 new mode 100644 index 709de6b0d6..477402c6ea --- a/typedapi/indices/migratetodatastream/response.go +++ b/typedapi/indices/migratetodatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package migratetodatastream // Response holds the response body struct for the package migratetodatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/migrate_to_data_stream/IndicesMigrateToDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/migrate_to_data_stream/IndicesMigrateToDataStreamResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/modifydatastream/modify_data_stream.go b/typedapi/indices/modifydatastream/modify_data_stream.go old mode 100755 new mode 100644 index cb39f06e2d..dde9bced41 --- a/typedapi/indices/modifydatastream/modify_data_stream.go +++ b/typedapi/indices/modifydatastream/modify_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Modifies a data stream package modifydatastream @@ -194,7 +194,6 @@ func (r ModifyDataStream) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/modifydatastream/request.go b/typedapi/indices/modifydatastream/request.go old mode 100755 new mode 100644 index 7cd17e811b..faf547b080 --- a/typedapi/indices/modifydatastream/request.go +++ b/typedapi/indices/modifydatastream/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package modifydatastream @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package modifydatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/modify_data_stream/IndicesModifyDataStreamRequest.ts#L23-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/modify_data_stream/IndicesModifyDataStreamRequest.ts#L23-L35 type Request struct { // Actions Actions to perform. diff --git a/typedapi/indices/modifydatastream/response.go b/typedapi/indices/modifydatastream/response.go old mode 100755 new mode 100644 index 9803a6ee3b..fc12000b37 --- a/typedapi/indices/modifydatastream/response.go +++ b/typedapi/indices/modifydatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package modifydatastream // Response holds the response body struct for the package modifydatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/modify_data_stream/IndicesModifyDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/modify_data_stream/IndicesModifyDataStreamResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/open/open.go b/typedapi/indices/open/open.go old mode 100755 new mode 100644 index 76089307de..b5d82c7ef1 --- a/typedapi/indices/open/open.go +++ b/typedapi/indices/open/open.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Opens an index. package open @@ -169,7 +169,6 @@ func (r Open) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/open/response.go b/typedapi/indices/open/response.go old mode 100755 new mode 100644 index b1fdeb6c9c..1efdbf1137 --- a/typedapi/indices/open/response.go +++ b/typedapi/indices/open/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package open // Response holds the response body struct for the package open // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/open/IndicesOpenResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/open/IndicesOpenResponse.ts#L20-L25 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/promotedatastream/promote_data_stream.go b/typedapi/indices/promotedatastream/promote_data_stream.go old mode 100755 new mode 100644 index aa712ee829..70e520e97a --- a/typedapi/indices/promotedatastream/promote_data_stream.go +++ b/typedapi/indices/promotedatastream/promote_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Promotes a data stream from a replicated data stream managed by CCR to a // regular data stream @@ -172,7 +172,6 @@ func (r PromoteDataStream) Do(ctx context.Context) (Response, error) { } return *response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/promotedatastream/response.go b/typedapi/indices/promotedatastream/response.go old mode 100755 new mode 100644 index a496c3309e..fb3a2615af --- a/typedapi/indices/promotedatastream/response.go +++ b/typedapi/indices/promotedatastream/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package promotedatastream @@ -24,6 +24,10 @@ import "encoding/json" // Response holds the response body struct for the package promotedatastream // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/promote_data_stream/IndicesPromoteDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/promote_data_stream/IndicesPromoteDataStreamResponse.ts#L22-L24 -type Response json.RawMessage +type Response = json.RawMessage + +func NewResponse() *Response { + return new(Response) +} diff --git a/typedapi/indices/putalias/put_alias.go b/typedapi/indices/putalias/put_alias.go old mode 100755 new mode 100644 index ccb1cc6da9..288e94c2e7 --- a/typedapi/indices/putalias/put_alias.go +++ b/typedapi/indices/putalias/put_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates an alias. 
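
A sketch for the put_alias request type above, using a plain bool pointer for the optional is_write_index field. The index and alias names are placeholders, the client is the one assumed throughout, and the constructor is assumed to take the index and alias name in path order:

// Assumed imports: "context", "github.com/elastic/go-elasticsearch/v8",
// "github.com/elastic/go-elasticsearch/v8/typedapi/indices/putalias".
func markWriteAlias(ctx context.Context, es *elasticsearch.TypedClient, index, alias string) error {
	isWrite := true
	req := putalias.NewRequest()
	req.IsWriteIndex = &isWrite

	_, err := es.Indices.PutAlias(index, alias).Request(req).Do(ctx)
	return err
}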
package putalias @@ -222,7 +222,6 @@ func (r PutAlias) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/putalias/request.go b/typedapi/indices/putalias/request.go old mode 100755 new mode 100644 index 4af457dd08..ea42e5819c --- a/typedapi/indices/putalias/request.go +++ b/typedapi/indices/putalias/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putalias @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putalias // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_alias/IndicesPutAliasRequest.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_alias/IndicesPutAliasRequest.ts#L25-L46 type Request struct { Filter *types.Query `json:"filter,omitempty"` IndexRouting *string `json:"index_routing,omitempty"` diff --git a/typedapi/indices/putalias/response.go b/typedapi/indices/putalias/response.go old mode 100755 new mode 100644 index f42fdfaddf..5c0c9c8a7a --- a/typedapi/indices/putalias/response.go +++ b/typedapi/indices/putalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putalias // Response holds the response body struct for the package putalias // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_alias/IndicesPutAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_alias/IndicesPutAliasResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/putindextemplate/put_index_template.go b/typedapi/indices/putindextemplate/put_index_template.go old mode 100755 new mode 100644 index c1ccf7ac9f..ba98b1520b --- a/typedapi/indices/putindextemplate/put_index_template.go +++ b/typedapi/indices/putindextemplate/put_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates an index template. package putindextemplate @@ -204,7 +204,6 @@ func (r PutIndexTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/putindextemplate/request.go b/typedapi/indices/putindextemplate/request.go old mode 100755 new mode 100644 index df2e1d2d51..c09ab1ff86 --- a/typedapi/indices/putindextemplate/request.go +++ b/typedapi/indices/putindextemplate/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putindextemplate @@ -29,12 +29,12 @@ import ( // Request holds the request body struct for the package putindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L35-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L35-L58 type Request struct { ComposedOf []string `json:"composed_of,omitempty"` DataStream *types.DataStreamVisibility `json:"data_stream,omitempty"` IndexPatterns []string `json:"index_patterns,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` Priority *int `json:"priority,omitempty"` Template *types.IndexTemplateMapping `json:"template,omitempty"` Version *int64 `json:"version,omitempty"` diff --git a/typedapi/indices/putindextemplate/response.go b/typedapi/indices/putindextemplate/response.go old mode 100755 new mode 100644 index 564573d60f..095019b8d4 --- a/typedapi/indices/putindextemplate/response.go +++ b/typedapi/indices/putindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putindextemplate // Response holds the response body struct for the package putindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_index_template/IndicesPutIndexTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_index_template/IndicesPutIndexTemplateResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/putmapping/put_mapping.go b/typedapi/indices/putmapping/put_mapping.go old mode 100755 new mode 100644 index 3c963887aa..fa6efb1c2e --- a/typedapi/indices/putmapping/put_mapping.go +++ b/typedapi/indices/putmapping/put_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the index mappings. package putmapping @@ -204,7 +204,6 @@ func (r PutMapping) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/putmapping/request.go b/typedapi/indices/putmapping/request.go old mode 100755 new mode 100644 index 9c82c25f2f..e06a22e92f --- a/typedapi/indices/putmapping/request.go +++ b/typedapi/indices/putmapping/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putmapping @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package putmapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_mapping/IndicesPutMappingRequest.ts#L42-L116 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_mapping/IndicesPutMappingRequest.ts#L42-L116 type Request struct { // DateDetection Controls whether dynamic date detection is enabled. @@ -48,7 +48,7 @@ type Request struct { // Meta_ A mapping type can have custom meta data associated with it. These are // not used at all by Elasticsearch, but can be used to store // application-specific metadata. - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` // NumericDetection Automatically map strings into numeric data types for all fields. NumericDetection *bool `json:"numeric_detection,omitempty"` // Properties Mapping for a field. For new fields, this mapping can include: @@ -60,7 +60,7 @@ type Request struct { // Routing_ Enable making a routing value required on indexed documents. Routing_ *types.RoutingField `json:"_routing,omitempty"` // Runtime Mapping of runtime fields for the index. - Runtime map[string]types.RuntimeField `json:"runtime,omitempty"` + Runtime types.RuntimeFields `json:"runtime,omitempty"` // Source_ Control whether the _source field is enabled on the index. Source_ *types.SourceField `json:"_source,omitempty"` } diff --git a/typedapi/indices/putmapping/response.go b/typedapi/indices/putmapping/response.go old mode 100755 new mode 100644 index eafc1d754d..7752508c95 --- a/typedapi/indices/putmapping/response.go +++ b/typedapi/indices/putmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putmapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_mapping/IndicesPutMappingResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_mapping/IndicesPutMappingResponse.ts#L22-L24 type Response struct { Shards_ *types.ShardStatistics `json:"_shards,omitempty"` diff --git a/typedapi/indices/putsettings/put_settings.go b/typedapi/indices/putsettings/put_settings.go old mode 100755 new mode 100644 index 6b68f23a5d..e862519da2 --- a/typedapi/indices/putsettings/put_settings.go +++ b/typedapi/indices/putsettings/put_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the index settings. 
package putsettings @@ -207,7 +207,6 @@ func (r PutSettings) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/putsettings/response.go b/typedapi/indices/putsettings/response.go old mode 100755 new mode 100644 index bfbe9775e7..4e6d1d7d12 --- a/typedapi/indices/putsettings/response.go +++ b/typedapi/indices/putsettings/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putsettings // Response holds the response body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_settings/IndicesPutSettingsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_settings/IndicesPutSettingsResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/puttemplate/put_template.go b/typedapi/indices/puttemplate/put_template.go old mode 100755 new mode 100644 index 1f618126a1..6e4afc754c --- a/typedapi/indices/puttemplate/put_template.go +++ b/typedapi/indices/puttemplate/put_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates an index template. package puttemplate @@ -204,7 +204,6 @@ func (r PutTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/puttemplate/request.go b/typedapi/indices/puttemplate/request.go old mode 100755 new mode 100644 index 0030b21601..fee1d55787 --- a/typedapi/indices/puttemplate/request.go +++ b/typedapi/indices/puttemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttemplate @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package puttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_template/IndicesPutTemplateRequest.ts#L29-L93 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_template/IndicesPutTemplateRequest.ts#L29-L93 type Request struct { // Aliases Aliases for the index. diff --git a/typedapi/indices/puttemplate/response.go b/typedapi/indices/puttemplate/response.go old mode 100755 new mode 100644 index 15d27ab8ba..4f00d55e50 --- a/typedapi/indices/puttemplate/response.go +++ b/typedapi/indices/puttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttemplate // Response holds the response body struct for the package puttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_template/IndicesPutTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_template/IndicesPutTemplateResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/recovery/recovery.go b/typedapi/indices/recovery/recovery.go old mode 100755 new mode 100644 index 12e68004f8..4bcf6f33d1 --- a/typedapi/indices/recovery/recovery.go +++ b/typedapi/indices/recovery/recovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about ongoing index shard recoveries. package recovery @@ -172,7 +172,6 @@ func (r Recovery) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/recovery/response.go b/typedapi/indices/recovery/response.go old mode 100755 new mode 100644 index 9f9eb185e6..78ce101c7d --- a/typedapi/indices/recovery/response.go +++ b/typedapi/indices/recovery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package recovery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package recovery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/IndicesRecoveryResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/IndicesRecoveryResponse.ts#L24-L26 type Response map[string]types.RecoveryStatus diff --git a/typedapi/indices/refresh/refresh.go b/typedapi/indices/refresh/refresh.go old mode 100755 new mode 100644 index 4759349f7b..c45d3ce712 --- a/typedapi/indices/refresh/refresh.go +++ b/typedapi/indices/refresh/refresh.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Performs the refresh operation in one or more indices. 
package refresh @@ -172,7 +172,6 @@ func (r Refresh) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/refresh/response.go b/typedapi/indices/refresh/response.go old mode 100755 new mode 100644 index 34b3f96c0c..ed635f4f7b --- a/typedapi/indices/refresh/response.go +++ b/typedapi/indices/refresh/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package refresh @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package refresh // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/refresh/IndicesRefreshResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/refresh/IndicesRefreshResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go b/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go old mode 100755 new mode 100644 index 6422a2e44c..7652f72572 --- a/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go +++ b/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Reloads an index's search analyzers and their resources. package reloadsearchanalyzers @@ -169,7 +169,6 @@ func (r ReloadSearchAnalyzers) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/reloadsearchanalyzers/response.go b/typedapi/indices/reloadsearchanalyzers/response.go old mode 100755 new mode 100644 index fe177524f6..7014afd2fd --- a/typedapi/indices/reloadsearchanalyzers/response.go +++ b/typedapi/indices/reloadsearchanalyzers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reloadsearchanalyzers @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reloadsearchanalyzers // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/reload_search_analyzers/ReloadSearchAnalyzersResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/reload_search_analyzers/ReloadSearchAnalyzersResponse.ts#L23-L25 type Response struct { ReloadDetails []types.ReloadDetails `json:"reload_details"` diff --git a/typedapi/indices/resolveindex/resolve_index.go b/typedapi/indices/resolveindex/resolve_index.go old mode 100755 new mode 100644 index 6c8e24a8f7..09fe039e20 --- a/typedapi/indices/resolveindex/resolve_index.go +++ b/typedapi/indices/resolveindex/resolve_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about any matching indices, aliases, and data streams package resolveindex @@ -170,7 +170,6 @@ func (r ResolveIndex) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/resolveindex/response.go b/typedapi/indices/resolveindex/response.go old mode 100755 new mode 100644 index 310fe1bd2a..f2b8eb0117 --- a/typedapi/indices/resolveindex/response.go +++ b/typedapi/indices/resolveindex/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resolveindex @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resolveindex // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/resolve_index/ResolveIndexResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/resolve_index/ResolveIndexResponse.ts#L22-L28 type Response struct { Aliases []types.ResolveIndexAliasItem `json:"aliases"` diff --git a/typedapi/indices/rollover/request.go b/typedapi/indices/rollover/request.go old mode 100755 new mode 100644 index a2d0de26de..fcdad48012 --- a/typedapi/indices/rollover/request.go +++ b/typedapi/indices/rollover/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rollover @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rollover // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/rollover/IndicesRolloverRequest.ts#L29-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/rollover/IndicesRolloverRequest.ts#L29-L51 type Request struct { Aliases map[string]types.Alias `json:"aliases,omitempty"` Conditions *types.RolloverConditions `json:"conditions,omitempty"` diff --git a/typedapi/indices/rollover/response.go b/typedapi/indices/rollover/response.go old mode 100755 new mode 100644 index d083894941..673b503600 --- a/typedapi/indices/rollover/response.go +++ b/typedapi/indices/rollover/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rollover // Response holds the response body struct for the package rollover // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/rollover/IndicesRolloverResponse.ts#L22-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/rollover/IndicesRolloverResponse.ts#L22-L32 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/rollover/rollover.go b/typedapi/indices/rollover/rollover.go old mode 100755 new mode 100644 index e95b5962df..87de97c1f5 --- a/typedapi/indices/rollover/rollover.go +++ b/typedapi/indices/rollover/rollover.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates an alias to point to a new index when the existing index // is considered to be too large or too old. @@ -220,7 +220,6 @@ func (r Rollover) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/segments/response.go b/typedapi/indices/segments/response.go old mode 100755 new mode 100644 index 78a95b76c6..ad5f54ed6e --- a/typedapi/indices/segments/response.go +++ b/typedapi/indices/segments/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package segments @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package segments // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/segments/IndicesSegmentsResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/segments/IndicesSegmentsResponse.ts#L24-L29 type Response struct { Indices map[string]types.IndexSegment `json:"indices"` diff --git a/typedapi/indices/segments/segments.go b/typedapi/indices/segments/segments.go old mode 100755 new mode 100644 index f2583c2a32..5b99a22481 --- a/typedapi/indices/segments/segments.go +++ b/typedapi/indices/segments/segments.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides low-level information about segments in a Lucene index. package segments @@ -172,7 +172,6 @@ func (r Segments) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/shardstores/response.go b/typedapi/indices/shardstores/response.go old mode 100755 new mode 100644 index a4e0604f43..e3a30a966a --- a/typedapi/indices/shardstores/response.go +++ b/typedapi/indices/shardstores/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package shardstores @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package shardstores // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/IndicesShardStoresResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/IndicesShardStoresResponse.ts#L24-L26 type Response struct { Indices map[string]types.IndicesShardStores `json:"indices"` diff --git a/typedapi/indices/shardstores/shard_stores.go b/typedapi/indices/shardstores/shard_stores.go old mode 100755 new mode 100644 index 6f67b3c2ee..d19ca3ed93 --- a/typedapi/indices/shardstores/shard_stores.go +++ b/typedapi/indices/shardstores/shard_stores.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides store information for shard copies of indices. 
package shardstores @@ -172,7 +172,6 @@ func (r ShardStores) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/shrink/request.go b/typedapi/indices/shrink/request.go old mode 100755 new mode 100644 index 6bc92dc901..856fd61538 --- a/typedapi/indices/shrink/request.go +++ b/typedapi/indices/shrink/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package shrink @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package shrink // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shrink/IndicesShrinkRequest.ts#L27-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shrink/IndicesShrinkRequest.ts#L27-L46 type Request struct { Aliases map[string]types.Alias `json:"aliases,omitempty"` Settings map[string]json.RawMessage `json:"settings,omitempty"` diff --git a/typedapi/indices/shrink/response.go b/typedapi/indices/shrink/response.go old mode 100755 new mode 100644 index 8fe3398df8..7ddf08b1e2 --- a/typedapi/indices/shrink/response.go +++ b/typedapi/indices/shrink/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package shrink // Response holds the response body struct for the package shrink // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shrink/IndicesShrinkResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shrink/IndicesShrinkResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/shrink/shrink.go b/typedapi/indices/shrink/shrink.go old mode 100755 new mode 100644 index 8c8ac32f62..e405502572 --- a/typedapi/indices/shrink/shrink.go +++ b/typedapi/indices/shrink/shrink.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allow to shrink an existing index into a new index with fewer primary shards. package shrink @@ -211,7 +211,6 @@ func (r Shrink) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/simulateindextemplate/request.go b/typedapi/indices/simulateindextemplate/request.go old mode 100755 new mode 100644 index 17b7b22a21..4194de90e9 --- a/typedapi/indices/simulateindextemplate/request.go +++ b/typedapi/indices/simulateindextemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package simulateindextemplate @@ -29,13 +29,13 @@ import ( // Request holds the request body struct for the package simulateindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateRequest.ts#L33-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateRequest.ts#L33-L71 type Request struct { AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` ComposedOf []string `json:"composed_of,omitempty"` DataStream *types.DataStreamVisibility `json:"data_stream,omitempty"` IndexPatterns []string `json:"index_patterns,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` Priority *int `json:"priority,omitempty"` Template *types.IndexTemplateMapping `json:"template,omitempty"` Version *int64 `json:"version,omitempty"` diff --git a/typedapi/indices/simulateindextemplate/response.go b/typedapi/indices/simulateindextemplate/response.go old mode 100755 new mode 100644 index 29c283184d..b73297823c --- a/typedapi/indices/simulateindextemplate/response.go +++ b/typedapi/indices/simulateindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package simulateindextemplate // Response holds the response body struct for the package simulateindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/indices/simulateindextemplate/simulate_index_template.go b/typedapi/indices/simulateindextemplate/simulate_index_template.go old mode 100755 new mode 100644 index be5fafaef4..22aed3e1f3 --- a/typedapi/indices/simulateindextemplate/simulate_index_template.go +++ b/typedapi/indices/simulateindextemplate/simulate_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Simulate matching the given index name against the index templates in the // system @@ -208,7 +208,6 @@ func (r SimulateIndexTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/simulatetemplate/response.go b/typedapi/indices/simulatetemplate/response.go old mode 100755 new mode 100644 index 3d6f87b239..23b77790ee --- a/typedapi/indices/simulatetemplate/response.go +++ b/typedapi/indices/simulatetemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package simulatetemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package simulatetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L26-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L26-L31 type Response struct { Overlapping []types.Overlapping `json:"overlapping,omitempty"` diff --git a/typedapi/indices/simulatetemplate/simulate_template.go b/typedapi/indices/simulatetemplate/simulate_template.go old mode 100755 new mode 100644 index 2546f8a6d8..7cd6b5050b --- a/typedapi/indices/simulatetemplate/simulate_template.go +++ b/typedapi/indices/simulatetemplate/simulate_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Simulate resolving the given template name or body package simulatetemplate @@ -211,7 +211,6 @@ func (r SimulateTemplate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/split/request.go b/typedapi/indices/split/request.go old mode 100755 new mode 100644 index 972b802c21..1d555233bb --- a/typedapi/indices/split/request.go +++ b/typedapi/indices/split/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package split @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package split // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/split/IndicesSplitRequest.ts#L27-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/split/IndicesSplitRequest.ts#L27-L46 type Request struct { Aliases map[string]types.Alias `json:"aliases,omitempty"` Settings map[string]json.RawMessage `json:"settings,omitempty"` diff --git a/typedapi/indices/split/response.go b/typedapi/indices/split/response.go old mode 100755 new mode 100644 index adc7dc4e3b..ccfac5d9bf --- a/typedapi/indices/split/response.go +++ b/typedapi/indices/split/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package split // Response holds the response body struct for the package split // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/split/IndicesSplitResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/split/IndicesSplitResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/split/split.go b/typedapi/indices/split/split.go old mode 100755 new mode 100644 index 5d2553baf5..35852f2926 --- a/typedapi/indices/split/split.go +++ b/typedapi/indices/split/split.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows you to split an existing index into a new index with more primary // shards. @@ -213,7 +213,6 @@ func (r Split) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/stats/response.go b/typedapi/indices/stats/response.go old mode 100755 new mode 100644 index 87825eaa1f..32aac0c21d --- a/typedapi/indices/stats/response.go +++ b/typedapi/indices/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/IndicesStatsResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/IndicesStatsResponse.ts#L24-L30 type Response struct { All_ types.IndicesStats `json:"_all"` diff --git a/typedapi/indices/stats/stats.go b/typedapi/indices/stats/stats.go old mode 100755 new mode 100644 index c6459d0e0a..ef810b8f51 --- a/typedapi/indices/stats/stats.go +++ b/typedapi/indices/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Provides statistics on operations happening in an index. package stats @@ -196,7 +196,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/unfreeze/response.go b/typedapi/indices/unfreeze/response.go old mode 100755 new mode 100644 index c740fa33f1..ddb325cf00 --- a/typedapi/indices/unfreeze/response.go +++ b/typedapi/indices/unfreeze/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package unfreeze // Response holds the response body struct for the package unfreeze // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/unfreeze/IndicesUnfreezeResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/unfreeze/IndicesUnfreezeResponse.ts#L20-L25 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/indices/unfreeze/unfreeze.go b/typedapi/indices/unfreeze/unfreeze.go old mode 100755 new mode 100644 index a3194583ba..3af968d436 --- a/typedapi/indices/unfreeze/unfreeze.go +++ b/typedapi/indices/unfreeze/unfreeze.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Unfreezes an index. When a frozen index is unfrozen, the index goes through // the normal recovery process and becomes writeable again. 
@@ -171,7 +171,6 @@ func (r Unfreeze) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/updatealiases/request.go b/typedapi/indices/updatealiases/request.go old mode 100755 new mode 100644 index 3cf80a347d..704492d959 --- a/typedapi/indices/updatealiases/request.go +++ b/typedapi/indices/updatealiases/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatealiases @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package updatealiases // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/update_aliases/IndicesUpdateAliasesRequest.ts#L24-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/update_aliases/IndicesUpdateAliasesRequest.ts#L24-L37 type Request struct { Actions []types.IndicesAction `json:"actions,omitempty"` } diff --git a/typedapi/indices/updatealiases/response.go b/typedapi/indices/updatealiases/response.go old mode 100755 new mode 100644 index e55f823672..74b666fce0 --- a/typedapi/indices/updatealiases/response.go +++ b/typedapi/indices/updatealiases/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatealiases // Response holds the response body struct for the package updatealiases // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/update_aliases/IndicesUpdateAliasesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/update_aliases/IndicesUpdateAliasesResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/indices/updatealiases/update_aliases.go b/typedapi/indices/updatealiases/update_aliases.go old mode 100755 new mode 100644 index 14e9087ea0..3fc53bce07 --- a/typedapi/indices/updatealiases/update_aliases.go +++ b/typedapi/indices/updatealiases/update_aliases.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates index aliases. package updatealiases @@ -192,7 +192,6 @@ func (r UpdateAliases) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/indices/validatequery/request.go b/typedapi/indices/validatequery/request.go old mode 100755 new mode 100644 index 3fe1557a70..5f1b440d70 --- a/typedapi/indices/validatequery/request.go +++ b/typedapi/indices/validatequery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package validatequery @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package validatequery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/validate_query/IndicesValidateQueryRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/validate_query/IndicesValidateQueryRequest.ts#L25-L51 type Request struct { Query *types.Query `json:"query,omitempty"` } diff --git a/typedapi/indices/validatequery/response.go b/typedapi/indices/validatequery/response.go old mode 100755 new mode 100644 index 2937f42b94..8663dbc1f7 --- a/typedapi/indices/validatequery/response.go +++ b/typedapi/indices/validatequery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package validatequery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package validatequery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L23-L30 type Response struct { Error *string `json:"error,omitempty"` diff --git a/typedapi/indices/validatequery/validate_query.go b/typedapi/indices/validatequery/validate_query.go old mode 100755 new mode 100644 index 562a4c9e00..6b4253877b --- a/typedapi/indices/validatequery/validate_query.go +++ b/typedapi/indices/validatequery/validate_query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows a user to validate a potentially expensive query without executing it. package validatequery @@ -213,7 +213,6 @@ func (r ValidateQuery) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ingest/deletepipeline/delete_pipeline.go b/typedapi/ingest/deletepipeline/delete_pipeline.go old mode 100755 new mode 100644 index bacad9abcf..397fcf6e2a --- a/typedapi/ingest/deletepipeline/delete_pipeline.go +++ b/typedapi/ingest/deletepipeline/delete_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a pipeline. 
package deletepipeline @@ -170,7 +170,6 @@ func (r DeletePipeline) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ingest/deletepipeline/response.go b/typedapi/ingest/deletepipeline/response.go old mode 100755 new mode 100644 index 2e01501aa3..f5704c5051 --- a/typedapi/ingest/deletepipeline/response.go +++ b/typedapi/ingest/deletepipeline/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletepipeline // Response holds the response body struct for the package deletepipeline // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/delete_pipeline/DeletePipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/delete_pipeline/DeletePipelineResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ingest/geoipstats/geo_ip_stats.go b/typedapi/ingest/geoipstats/geo_ip_stats.go old mode 100755 new mode 100644 index 4bea1d991a..d841cc7408 --- a/typedapi/ingest/geoipstats/geo_ip_stats.go +++ b/typedapi/ingest/geoipstats/geo_ip_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns statistical information about geoip databases package geoipstats @@ -161,7 +161,6 @@ func (r GeoIpStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ingest/geoipstats/response.go b/typedapi/ingest/geoipstats/response.go old mode 100755 new mode 100644 index 4c0fe0a413..f8bf33ac6a --- a/typedapi/ingest/geoipstats/response.go +++ b/typedapi/ingest/geoipstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package geoipstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package geoipstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/geo_ip_stats/IngestGeoIpStatsResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/geo_ip_stats/IngestGeoIpStatsResponse.ts#L24-L31 type Response struct { diff --git a/typedapi/ingest/getpipeline/get_pipeline.go b/typedapi/ingest/getpipeline/get_pipeline.go old mode 100755 new mode 100644 index d1b35e6b55..3b4010c69f --- a/typedapi/ingest/getpipeline/get_pipeline.go +++ b/typedapi/ingest/getpipeline/get_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a pipeline. package getpipeline @@ -176,7 +176,6 @@ func (r GetPipeline) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ingest/getpipeline/response.go b/typedapi/ingest/getpipeline/response.go old mode 100755 new mode 100644 index 38722b1986..cb82a61fed --- a/typedapi/ingest/getpipeline/response.go +++ b/typedapi/ingest/getpipeline/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getpipeline @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/get_pipeline/GetPipelineResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/get_pipeline/GetPipelineResponse.ts#L23-L25 type Response map[string]types.IngestPipeline diff --git a/typedapi/ingest/processorgrok/processor_grok.go b/typedapi/ingest/processorgrok/processor_grok.go old mode 100755 new mode 100644 index 4d52ffa6e1..b264597099 --- a/typedapi/ingest/processorgrok/processor_grok.go +++ b/typedapi/ingest/processorgrok/processor_grok.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a list of the built-in patterns. package processorgrok @@ -161,7 +161,6 @@ func (r ProcessorGrok) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ingest/processorgrok/response.go b/typedapi/ingest/processorgrok/response.go old mode 100755 new mode 100644 index 2decc3ab72..98efabb8e8 --- a/typedapi/ingest/processorgrok/response.go +++ b/typedapi/ingest/processorgrok/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package processorgrok // Response holds the response body struct for the package processorgrok // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/processor_grok/GrokProcessorPatternsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/processor_grok/GrokProcessorPatternsResponse.ts#L22-L24 type Response struct { Patterns map[string]string `json:"patterns"` diff --git a/typedapi/ingest/putpipeline/put_pipeline.go b/typedapi/ingest/putpipeline/put_pipeline.go old mode 100755 new mode 100644 index 842db9f2a8..adad59f736 --- a/typedapi/ingest/putpipeline/put_pipeline.go +++ b/typedapi/ingest/putpipeline/put_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates a pipeline. package putpipeline @@ -205,7 +205,6 @@ func (r PutPipeline) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ingest/putpipeline/request.go b/typedapi/ingest/putpipeline/request.go old mode 100755 new mode 100644 index 8bd7045898..abecc94546 --- a/typedapi/ingest/putpipeline/request.go +++ b/typedapi/ingest/putpipeline/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putpipeline @@ -29,14 +29,14 @@ import ( // Request holds the request body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/put_pipeline/PutPipelineRequest.ts#L25-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/put_pipeline/PutPipelineRequest.ts#L25-L74 type Request struct { // Description Description of the ingest pipeline. Description *string `json:"description,omitempty"` // Meta_ Optional metadata about the ingest pipeline. May have any contents. This map // is not automatically generated by Elasticsearch. - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` // OnFailure Processors to run immediately after a processor failure. Each processor // supports a processor-level `on_failure` value. If a processor without an // `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as diff --git a/typedapi/ingest/putpipeline/response.go b/typedapi/ingest/putpipeline/response.go old mode 100755 new mode 100644 index a1a308822a..201df4e3db --- a/typedapi/ingest/putpipeline/response.go +++ b/typedapi/ingest/putpipeline/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
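// Illustrative sketch, not part of the generated diff: with the request.go
// change above, the put-pipeline `_meta` field is typed as types.Metadata
// instead of a raw map[string]json.RawMessage. Decoding a JSON body into the
// generated Request is unaffected; Description and Meta_ are the fields shown
// in the hunk above, the sample pipeline body is made up for the example.
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ingest/putpipeline"
)

func main() {
	body := []byte(`{
	  "description": "rename a field",
	  "_meta": {"owner": "search-team"},
	  "processors": [{"rename": {"field": "src", "target_field": "dst"}}]
	}`)

	var req putpipeline.Request
	if err := json.Unmarshal(body, &req); err != nil {
		log.Fatal(err)
	}
	// Meta_ now carries the optional `_meta` object as types.Metadata.
	fmt.Println(*req.Description, req.Meta_)
}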
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putpipeline // Response holds the response body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/put_pipeline/PutPipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/put_pipeline/PutPipelineResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ingest/simulate/request.go b/typedapi/ingest/simulate/request.go old mode 100755 new mode 100644 index 4a4b2f3e5d..7c42ef2441 --- a/typedapi/ingest/simulate/request.go +++ b/typedapi/ingest/simulate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package simulate @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package simulate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/simulate/SimulatePipelineRequest.ts#L25-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/simulate/SimulatePipelineRequest.ts#L25-L41 type Request struct { Docs []types.Document `json:"docs,omitempty"` Pipeline *types.IngestPipeline `json:"pipeline,omitempty"` diff --git a/typedapi/ingest/simulate/response.go b/typedapi/ingest/simulate/response.go old mode 100755 new mode 100644 index e27b8465c1..09d000649a --- a/typedapi/ingest/simulate/response.go +++ b/typedapi/ingest/simulate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package simulate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package simulate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/simulate/SimulatePipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/simulate/SimulatePipelineResponse.ts#L22-L24 type Response struct { Docs []types.PipelineSimulation `json:"docs"` diff --git a/typedapi/ingest/simulate/simulate.go b/typedapi/ingest/simulate/simulate.go old mode 100755 new mode 100644 index cb4627cce3..19e1e54847 --- a/typedapi/ingest/simulate/simulate.go +++ b/typedapi/ingest/simulate/simulate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows to simulate a pipeline with example documents. 
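// Illustrative sketch, not part of the generated diff: the simulate request
// shown above pairs example documents with an inline pipeline definition
// (Docs / Pipeline fields). Building the body from JSON sidesteps the nested
// types; the Request(...) builder and the client wiring are assumptions here.
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ingest/simulate"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	var req simulate.Request
	if err := json.Unmarshal([]byte(`{
	  "pipeline": {"processors": [{"lowercase": {"field": "name"}}]},
	  "docs": [{"_source": {"name": "ELASTIC"}}]
	}`), &req); err != nil {
		log.Fatal(err)
	}

	// Each simulated document comes back in Response.Docs.
	res, err := es.Ingest.Simulate().Request(&req).Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d simulated docs\n", len(res.Docs))
}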
package simulate @@ -215,7 +215,6 @@ func (r Simulate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/delete/delete.go b/typedapi/license/delete/delete.go old mode 100755 new mode 100644 index 53ea3c600a..618588d6df --- a/typedapi/license/delete/delete.go +++ b/typedapi/license/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes licensing information for the cluster package delete @@ -157,7 +157,6 @@ func (r Delete) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/delete/response.go b/typedapi/license/delete/response.go old mode 100755 new mode 100644 index fc773d4a8c..4529fe3ccd --- a/typedapi/license/delete/response.go +++ b/typedapi/license/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/delete/DeleteLicenseResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/delete/DeleteLicenseResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/license/get/get.go b/typedapi/license/get/get.go old mode 100755 new mode 100644 index 7a83f118a9..9980eb97e6 --- a/typedapi/license/get/get.go +++ b/typedapi/license/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves licensing information for the cluster package get @@ -158,7 +158,6 @@ func (r Get) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/get/response.go b/typedapi/license/get/response.go old mode 100755 new mode 100644 index 195e018714..24935f22c7 --- a/typedapi/license/get/response.go +++ b/typedapi/license/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/get/GetLicenseResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/get/GetLicenseResponse.ts#L22-L24 type Response struct { License types.LicenseInformation `json:"license"` diff --git a/typedapi/license/getbasicstatus/get_basic_status.go b/typedapi/license/getbasicstatus/get_basic_status.go old mode 100755 new mode 100644 index 28d13f16cf..8c04ca952f --- a/typedapi/license/getbasicstatus/get_basic_status.go +++ b/typedapi/license/getbasicstatus/get_basic_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about the status of the basic license. package getbasicstatus @@ -159,7 +159,6 @@ func (r GetBasicStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/getbasicstatus/response.go b/typedapi/license/getbasicstatus/response.go old mode 100755 new mode 100644 index 1fe703daec..2f885bf0d4 --- a/typedapi/license/getbasicstatus/response.go +++ b/typedapi/license/getbasicstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getbasicstatus // Response holds the response body struct for the package getbasicstatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/get_basic_status/GetBasicLicenseStatusResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/get_basic_status/GetBasicLicenseStatusResponse.ts#L20-L22 type Response struct { EligibleToStartBasic bool `json:"eligible_to_start_basic"` diff --git a/typedapi/license/gettrialstatus/get_trial_status.go b/typedapi/license/gettrialstatus/get_trial_status.go old mode 100755 new mode 100644 index 1d7902f39f..59af5161e9 --- a/typedapi/license/gettrialstatus/get_trial_status.go +++ b/typedapi/license/gettrialstatus/get_trial_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about the status of the trial license. 
package gettrialstatus @@ -159,7 +159,6 @@ func (r GetTrialStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/gettrialstatus/response.go b/typedapi/license/gettrialstatus/response.go old mode 100755 new mode 100644 index 9f795328f5..6795236320 --- a/typedapi/license/gettrialstatus/response.go +++ b/typedapi/license/gettrialstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettrialstatus // Response holds the response body struct for the package gettrialstatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/get_trial_status/GetTrialLicenseStatusResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/get_trial_status/GetTrialLicenseStatusResponse.ts#L20-L22 type Response struct { EligibleToStartTrial bool `json:"eligible_to_start_trial"` diff --git a/typedapi/license/post/post.go b/typedapi/license/post/post.go old mode 100755 new mode 100644 index 34eb98d221..6a4770f395 --- a/typedapi/license/post/post.go +++ b/typedapi/license/post/post.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the license for the cluster. package post @@ -193,7 +193,6 @@ func (r Post) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/post/request.go b/typedapi/license/post/request.go old mode 100755 new mode 100644 index c682b07eab..10e5712dc9 --- a/typedapi/license/post/request.go +++ b/typedapi/license/post/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package post @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package post // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/post/PostLicenseRequest.ts#L23-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/post/PostLicenseRequest.ts#L23-L44 type Request struct { License *types.License `json:"license,omitempty"` // Licenses A sequence of one or more JSON documents containing the license information. diff --git a/typedapi/license/post/response.go b/typedapi/license/post/response.go old mode 100755 new mode 100644 index 065d56ed77..4d13fdf374 --- a/typedapi/license/post/response.go +++ b/typedapi/license/post/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package post @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package post // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/post/PostLicenseResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/post/PostLicenseResponse.ts#L23-L29 type Response struct { Acknowledge *types.Acknowledgement `json:"acknowledge,omitempty"` diff --git a/typedapi/license/poststartbasic/post_start_basic.go b/typedapi/license/poststartbasic/post_start_basic.go old mode 100755 new mode 100644 index 7ee596ac43..88cb05a306 --- a/typedapi/license/poststartbasic/post_start_basic.go +++ b/typedapi/license/poststartbasic/post_start_basic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Starts an indefinite basic license. package poststartbasic @@ -160,7 +160,6 @@ func (r PostStartBasic) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/poststartbasic/response.go b/typedapi/license/poststartbasic/response.go old mode 100755 new mode 100644 index 795c6a589a..b8b9de40af --- a/typedapi/license/poststartbasic/response.go +++ b/typedapi/license/poststartbasic/response.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package poststartbasic import ( + "bytes" + "encoding/json" + "errors" + "io" + "strconv" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/licensetype" ) // Response holds the response body struct for the package poststartbasic // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/post_start_basic/StartBasicLicenseResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/post_start_basic/StartBasicLicenseResponse.ts#L23-L31 type Response struct { Acknowledge map[string][]string `json:"acknowledge,omitempty"` @@ -43,3 +49,88 @@ func NewResponse() *Response { } return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "acknowledge": + if s.Acknowledge == nil { + s.Acknowledge = make(map[string][]string, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := new(string) + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Acknowledge[key] = append(s.Acknowledge[key], *o) + default: + o := []string{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Acknowledge[key] = o + } + } + + case "acknowledged": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Acknowledged = value + case bool: + s.Acknowledged = v + } + + case "basic_was_started": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.BasicWasStarted = value + case bool: + s.BasicWasStarted = v + } + + case "error_message": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ErrorMessage = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} diff --git a/typedapi/license/poststarttrial/post_start_trial.go b/typedapi/license/poststarttrial/post_start_trial.go old mode 100755 new mode 100644 index 90f6aeb08e..0b499a326a --- a/typedapi/license/poststarttrial/post_start_trial.go +++ b/typedapi/license/poststarttrial/post_start_trial.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // starts a limited time trial license. 
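// Illustrative sketch, not part of the generated diff: the UnmarshalJSON added
// above lets the start-basic response accept both boolean and string forms of
// "acknowledged"/"basic_was_started", and both single-string and array values
// inside "acknowledge". The sample payload is made up for the example.
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/license/poststartbasic"
)

func main() {
	payload := []byte(`{
	  "acknowledged": "false",
	  "basic_was_started": false,
	  "error_message": "Operation failed: Needs acknowledgement.",
	  "acknowledge": {
	    "message": "License [basic] would be incompatible with [gold] features",
	    "watcher": ["Watches will be disabled"]
	  }
	}`)

	res := poststartbasic.NewResponse()
	if err := json.Unmarshal(payload, res); err != nil {
		log.Fatal(err)
	}
	// "acknowledged" arrived as a string and "acknowledge.message" as a single
	// string; both are normalized by the custom decoder shown above.
	fmt.Println(res.Acknowledged, res.BasicWasStarted, res.Acknowledge["watcher"])
}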
package poststarttrial @@ -160,7 +160,6 @@ func (r PostStartTrial) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/license/poststarttrial/response.go b/typedapi/license/poststarttrial/response.go old mode 100755 new mode 100644 index 06e0f4fcac..9165b6f6bf --- a/typedapi/license/poststarttrial/response.go +++ b/typedapi/license/poststarttrial/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package poststarttrial @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package poststarttrial // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/post_start_trial/StartTrialLicenseResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/post_start_trial/StartTrialLicenseResponse.ts#L22-L29 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/logstash/deletepipeline/delete_pipeline.go b/typedapi/logstash/deletepipeline/delete_pipeline.go old mode 100755 new mode 100644 index 40d6d59fdb..b76bf071cc --- a/typedapi/logstash/deletepipeline/delete_pipeline.go +++ b/typedapi/logstash/deletepipeline/delete_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes Logstash Pipelines used by Central Management package deletepipeline @@ -24,7 +24,6 @@ package deletepipeline import ( gobytes "bytes" "context" - "encoding/json" "errors" "fmt" "io" @@ -34,7 +33,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) const ( @@ -152,36 +150,6 @@ func (r DeletePipeline) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a deletepipeline.Response -func (r DeletePipeline) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. 
func (r DeletePipeline) IsSuccess(ctx context.Context) (bool, error) { diff --git a/typedapi/logstash/deletepipeline/response.go b/typedapi/logstash/deletepipeline/response.go deleted file mode 100755 index d84704747d..0000000000 --- a/typedapi/logstash/deletepipeline/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package deletepipeline - -// Response holds the response body struct for the package deletepipeline -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/logstash/delete_pipeline/LogstashDeletePipelineResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/logstash/getpipeline/get_pipeline.go b/typedapi/logstash/getpipeline/get_pipeline.go old mode 100755 new mode 100644 index 8cf3fd573e..6c2b9de3cc --- a/typedapi/logstash/getpipeline/get_pipeline.go +++ b/typedapi/logstash/getpipeline/get_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves Logstash Pipelines used by Central Management package getpipeline @@ -99,6 +99,13 @@ func (r *GetPipeline) HttpRequest(ctx context.Context) (*http.Request, error) { r.path.Scheme = "http" switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_logstash") + path.WriteString("/") + path.WriteString("pipeline") + + method = http.MethodGet case r.paramSet == idMask: path.WriteString("/") path.WriteString("_logstash") @@ -170,7 +177,6 @@ func (r GetPipeline) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/logstash/getpipeline/response.go b/typedapi/logstash/getpipeline/response.go old mode 100755 new mode 100644 index a921126f8c..1c58b267a4 --- a/typedapi/logstash/getpipeline/response.go +++ b/typedapi/logstash/getpipeline/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getpipeline @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/logstash/get_pipeline/LogstashGetPipelineResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/logstash/get_pipeline/LogstashGetPipelineResponse.ts#L24-L26 type Response map[string]types.LogstashPipeline diff --git a/typedapi/logstash/putpipeline/put_pipeline.go b/typedapi/logstash/putpipeline/put_pipeline.go old mode 100755 new mode 100644 index 62b1f1cfed..6c0beed6f5 --- a/typedapi/logstash/putpipeline/put_pipeline.go +++ b/typedapi/logstash/putpipeline/put_pipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds and updates Logstash Pipelines used for Central Management package putpipeline @@ -187,36 +187,6 @@ func (r PutPipeline) Perform(ctx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a putpipeline.Response -func (r PutPipeline) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // Header set a key, value pair in the PutPipeline headers map. func (r *PutPipeline) Header(key, value string) *PutPipeline { r.headers.Set(key, value) diff --git a/typedapi/logstash/putpipeline/response.go b/typedapi/logstash/putpipeline/response.go deleted file mode 100755 index 06ba1d64d7..0000000000 --- a/typedapi/logstash/putpipeline/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. 
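// Illustrative sketch, not part of the generated diff: after the hunks above,
// the Logstash put/delete pipeline endpoints no longer expose Do() (their
// response bodies are empty), so callers rely on IsSuccess() or Perform(),
// while GetPipeline gains a paramSet == 0 branch and can be issued without an
// id to list every pipeline (GET /_logstash/pipeline). The typed-client call
// shapes (argument lists on DeletePipeline/GetPipeline) are assumptions.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}
	ctx := context.Background()

	// Delete now reports success as a bool instead of decoding an empty body.
	ok, err := es.Logstash.DeletePipeline("my-pipeline").IsSuccess(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("deleted:", ok)

	// Listing without an id; the response is a map of pipeline id to
	// types.LogstashPipeline, as shown in getpipeline/response.go above.
	pipelines, err := es.Logstash.GetPipeline().Do(ctx)
	if err != nil {
		log.Fatal(err)
	}
	for id := range pipelines {
		fmt.Println(id)
	}
}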
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package putpipeline - -// Response holds the response body struct for the package putpipeline -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/logstash/put_pipeline/LogstashPutPipelineResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/migration/deprecations/deprecations.go b/typedapi/migration/deprecations/deprecations.go old mode 100755 new mode 100644 index 5c382d8602..49df8758cc --- a/typedapi/migration/deprecations/deprecations.go +++ b/typedapi/migration/deprecations/deprecations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about different cluster, node, and index level settings // that use deprecated features that will be removed or changed in the next @@ -179,7 +179,6 @@ func (r Deprecations) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/migration/deprecations/response.go b/typedapi/migration/deprecations/response.go old mode 100755 new mode 100644 index 92b46fcac7..79452f46eb --- a/typedapi/migration/deprecations/response.go +++ b/typedapi/migration/deprecations/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deprecations @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deprecations // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/deprecations/DeprecationInfoResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/deprecations/DeprecationInfoResponse.ts#L23-L30 type Response struct { ClusterSettings []types.Deprecation `json:"cluster_settings"` diff --git a/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go b/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go old mode 100755 new mode 100644 index c2a1ab9d3b..1ed8abbe6a --- a/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go +++ b/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Find out whether system features need to be upgraded or not package getfeatureupgradestatus @@ -159,7 +159,6 @@ func (r GetFeatureUpgradeStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/migration/getfeatureupgradestatus/response.go b/typedapi/migration/getfeatureupgradestatus/response.go old mode 100755 new mode 100644 index 4678727fd8..796d9aab12 --- a/typedapi/migration/getfeatureupgradestatus/response.go +++ b/typedapi/migration/getfeatureupgradestatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getfeatureupgradestatus @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package getfeatureupgradestatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L23-L28 type Response struct { Features []types.GetMigrationFeature `json:"features"` diff --git a/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go b/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go old mode 100755 new mode 100644 index 554ef16a72..cc9abd6b92 --- a/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go +++ b/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Begin upgrades for system features package postfeatureupgrade @@ -159,7 +159,6 @@ func (r PostFeatureUpgrade) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/migration/postfeatureupgrade/response.go b/typedapi/migration/postfeatureupgrade/response.go old mode 100755 new mode 100644 index 8693284f67..7fe376e079 --- a/typedapi/migration/postfeatureupgrade/response.go +++ b/typedapi/migration/postfeatureupgrade/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package postfeatureupgrade @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package postfeatureupgrade // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L20-L25 type Response struct { Accepted bool `json:"accepted"` diff --git a/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go b/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go old mode 100755 new mode 100644 index 4a855a8b57..c7e500c792 --- a/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go +++ b/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clear the cached results from a trained model deployment package cleartrainedmodeldeploymentcache @@ -182,7 +182,6 @@ func (r ClearTrainedModelDeploymentCache) Do(ctx context.Context) (*Response, er } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/cleartrainedmodeldeploymentcache/response.go b/typedapi/ml/cleartrainedmodeldeploymentcache/response.go old mode 100755 new mode 100644 index 94031884b5..1cd9758445 --- a/typedapi/ml/cleartrainedmodeldeploymentcache/response.go +++ b/typedapi/ml/cleartrainedmodeldeploymentcache/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package cleartrainedmodeldeploymentcache // Response holds the response body struct for the package cleartrainedmodeldeploymentcache // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/clear_trained_model_deployment_cache/MlClearTrainedModelDeploymentCacheResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/clear_trained_model_deployment_cache/MlClearTrainedModelDeploymentCacheResponse.ts#L20-L24 type Response struct { Cleared bool `json:"cleared"` diff --git a/typedapi/ml/closejob/close_job.go b/typedapi/ml/closejob/close_job.go old mode 100755 new mode 100644 index e34382f172..294f679d21 --- a/typedapi/ml/closejob/close_job.go +++ b/typedapi/ml/closejob/close_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Closes one or more anomaly detection jobs. A job can be opened and closed // multiple times throughout its lifecycle. @@ -210,7 +210,6 @@ func (r CloseJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/closejob/request.go b/typedapi/ml/closejob/request.go old mode 100755 new mode 100644 index d391f1c719..185d57bdee --- a/typedapi/ml/closejob/request.go +++ b/typedapi/ml/closejob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package closejob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package closejob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/close_job/MlCloseJobRequest.ts#L24-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/close_job/MlCloseJobRequest.ts#L24-L77 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. diff --git a/typedapi/ml/closejob/response.go b/typedapi/ml/closejob/response.go old mode 100755 new mode 100644 index 45b560e225..08d23b59b8 --- a/typedapi/ml/closejob/response.go +++ b/typedapi/ml/closejob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package closejob // Response holds the response body struct for the package closejob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/close_job/MlCloseJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/close_job/MlCloseJobResponse.ts#L20-L22 type Response struct { Closed bool `json:"closed"` diff --git a/typedapi/ml/deletecalendar/delete_calendar.go b/typedapi/ml/deletecalendar/delete_calendar.go old mode 100755 new mode 100644 index b3b84f7fde..dc68f38126 --- a/typedapi/ml/deletecalendar/delete_calendar.go +++ b/typedapi/ml/deletecalendar/delete_calendar.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a calendar. 
package deletecalendar @@ -170,7 +170,6 @@ func (r DeleteCalendar) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletecalendar/response.go b/typedapi/ml/deletecalendar/response.go old mode 100755 new mode 100644 index f48183f4b8..f071046a43 --- a/typedapi/ml/deletecalendar/response.go +++ b/typedapi/ml/deletecalendar/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletecalendar // Response holds the response body struct for the package deletecalendar // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_calendar/MlDeleteCalendarResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_calendar/MlDeleteCalendarResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletecalendarevent/delete_calendar_event.go b/typedapi/ml/deletecalendarevent/delete_calendar_event.go old mode 100755 new mode 100644 index 671d1c0b63..ac3ab4270c --- a/typedapi/ml/deletecalendarevent/delete_calendar_event.go +++ b/typedapi/ml/deletecalendarevent/delete_calendar_event.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes scheduled events from a calendar. package deletecalendarevent @@ -180,7 +180,6 @@ func (r DeleteCalendarEvent) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletecalendarevent/response.go b/typedapi/ml/deletecalendarevent/response.go old mode 100755 new mode 100644 index cf65861376..abef3b0cce --- a/typedapi/ml/deletecalendarevent/response.go +++ b/typedapi/ml/deletecalendarevent/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletecalendarevent // Response holds the response body struct for the package deletecalendarevent // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_calendar_event/MlDeleteCalendarEventResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_calendar_event/MlDeleteCalendarEventResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletecalendarjob/delete_calendar_job.go b/typedapi/ml/deletecalendarjob/delete_calendar_job.go old mode 100755 new mode 100644 index 2054a12d5e..24e8e6e6ee --- a/typedapi/ml/deletecalendarjob/delete_calendar_job.go +++ b/typedapi/ml/deletecalendarjob/delete_calendar_job.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes anomaly detection jobs from a calendar. package deletecalendarjob @@ -180,7 +180,6 @@ func (r DeleteCalendarJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletecalendarjob/response.go b/typedapi/ml/deletecalendarjob/response.go old mode 100755 new mode 100644 index 0f91e89f0f..79da7c0f7c --- a/typedapi/ml/deletecalendarjob/response.go +++ b/typedapi/ml/deletecalendarjob/response.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletecalendarjob +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // Response holds the response body struct for the package deletecalendarjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_calendar_job/MlDeleteCalendarJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_calendar_job/MlDeleteCalendarJobResponse.ts#L22-L31 type Response struct { @@ -39,3 +46,51 @@ func NewResponse() *Response { r := &Response{} return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "calendar_id": + if err := dec.Decode(&s.CalendarId); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "job_ids": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.JobIds = append(s.JobIds, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.JobIds); err != nil { + return err + } + } + + } + } + return nil +} diff --git a/typedapi/ml/deletedatafeed/delete_datafeed.go b/typedapi/ml/deletedatafeed/delete_datafeed.go old mode 100755 new mode 100644 index b56a4b73cf..209a2d485a --- a/typedapi/ml/deletedatafeed/delete_datafeed.go +++ b/typedapi/ml/deletedatafeed/delete_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing datafeed. 
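// Illustrative sketch, not part of the generated diff: the UnmarshalJSON added
// above accepts "job_ids" either as a single string or as an array of strings,
// appending both forms to JobIds. The sample payloads are made up for the
// example.
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/deletecalendarjob"
)

func main() {
	for _, payload := range []string{
		`{"calendar_id": "planned-outages", "job_ids": "total-requests"}`,
		`{"calendar_id": "planned-outages", "job_ids": ["total-requests", "response-times"]}`,
	} {
		res := deletecalendarjob.NewResponse()
		if err := json.Unmarshal([]byte(payload), res); err != nil {
			log.Fatal(err)
		}
		// Both payload shapes end up in the JobIds slice.
		fmt.Println(res.CalendarId, res.JobIds)
	}
}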
package deletedatafeed @@ -171,7 +171,6 @@ func (r DeleteDatafeed) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletedatafeed/response.go b/typedapi/ml/deletedatafeed/response.go old mode 100755 new mode 100644 index e9b58bd58b..214c76da77 --- a/typedapi/ml/deletedatafeed/response.go +++ b/typedapi/ml/deletedatafeed/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletedatafeed // Response holds the response body struct for the package deletedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_datafeed/MlDeleteDatafeedResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_datafeed/MlDeleteDatafeedResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go b/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go old mode 100755 new mode 100644 index b996eebbe5..d13cd7bcae --- a/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go +++ b/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing data frame analytics job. package deletedataframeanalytics @@ -173,7 +173,6 @@ func (r DeleteDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletedataframeanalytics/response.go b/typedapi/ml/deletedataframeanalytics/response.go old mode 100755 new mode 100644 index 8246ccc9db..21ecde1937 --- a/typedapi/ml/deletedataframeanalytics/response.go +++ b/typedapi/ml/deletedataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletedataframeanalytics // Response holds the response body struct for the package deletedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_data_frame_analytics/MlDeleteDataFrameAnalyticsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_data_frame_analytics/MlDeleteDataFrameAnalyticsResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deleteexpireddata/delete_expired_data.go b/typedapi/ml/deleteexpireddata/delete_expired_data.go old mode 100755 new mode 100644 index f2b02a3217..e732157c79 --- a/typedapi/ml/deleteexpireddata/delete_expired_data.go +++ b/typedapi/ml/deleteexpireddata/delete_expired_data.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes expired and unused machine learning data. package deleteexpireddata @@ -210,7 +210,6 @@ func (r DeleteExpiredData) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deleteexpireddata/request.go b/typedapi/ml/deleteexpireddata/request.go old mode 100755 new mode 100644 index 75d84a7ca3..d2abb5c008 --- a/typedapi/ml/deleteexpireddata/request.go +++ b/typedapi/ml/deleteexpireddata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteexpireddata @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package deleteexpireddata // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_expired_data/MlDeleteExpiredDataRequest.ts#L25-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_expired_data/MlDeleteExpiredDataRequest.ts#L25-L72 type Request struct { // RequestsPerSecond The desired requests per second for the deletion processes. The default diff --git a/typedapi/ml/deleteexpireddata/response.go b/typedapi/ml/deleteexpireddata/response.go old mode 100755 new mode 100644 index 0e90e7f440..84c82c9a3d --- a/typedapi/ml/deleteexpireddata/response.go +++ b/typedapi/ml/deleteexpireddata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteexpireddata // Response holds the response body struct for the package deleteexpireddata // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_expired_data/MlDeleteExpiredDataResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_expired_data/MlDeleteExpiredDataResponse.ts#L20-L22 type Response struct { Deleted bool `json:"deleted"` diff --git a/typedapi/ml/deletefilter/delete_filter.go b/typedapi/ml/deletefilter/delete_filter.go old mode 100755 new mode 100644 index 9719aa4cbf..622d3dc3b8 --- a/typedapi/ml/deletefilter/delete_filter.go +++ b/typedapi/ml/deletefilter/delete_filter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a filter. package deletefilter @@ -170,7 +170,6 @@ func (r DeleteFilter) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletefilter/response.go b/typedapi/ml/deletefilter/response.go old mode 100755 new mode 100644 index 7bed22a5df..0d60bf077f --- a/typedapi/ml/deletefilter/response.go +++ b/typedapi/ml/deletefilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletefilter // Response holds the response body struct for the package deletefilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_filter/MlDeleteFilterResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_filter/MlDeleteFilterResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deleteforecast/delete_forecast.go b/typedapi/ml/deleteforecast/delete_forecast.go old mode 100755 new mode 100644 index db726d9f84..ef514b500d --- a/typedapi/ml/deleteforecast/delete_forecast.go +++ b/typedapi/ml/deleteforecast/delete_forecast.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes forecasts from a machine learning job. 
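As an orientation for the regenerated ML delete endpoints above, here is a minimal usage sketch against the typed client. NewTypedClient and the Ml namespace are the standard go-elasticsearch v8 entry points, the Do signature and the Deleted field come from the delete_expired_data hunks above, and the address, job names, and log handling are illustrative placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Typed client setup; the address is a placeholder.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatalf("client: %s", err)
	}

	// _ml/_delete_expired_data with no job id; Do returns (*Response, error)
	// exactly as in the delete_expired_data hunk above.
	res, err := es.Ml.DeleteExpiredData().Do(context.Background())
	if err != nil {
		log.Fatalf("delete expired data: %s", err)
	}
	fmt.Println("deleted:", res.Deleted) // Deleted bool, per the response struct above
}

The same constructor-then-Do pattern applies to every endpoint in this diff; the remaining sketches assume an es client built this way.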
package deleteforecast @@ -191,7 +191,6 @@ func (r DeleteForecast) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deleteforecast/response.go b/typedapi/ml/deleteforecast/response.go old mode 100755 new mode 100644 index 7b79ecabf2..dbc2531145 --- a/typedapi/ml/deleteforecast/response.go +++ b/typedapi/ml/deleteforecast/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteforecast // Response holds the response body struct for the package deleteforecast // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_forecast/MlDeleteForecastResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_forecast/MlDeleteForecastResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletejob/delete_job.go b/typedapi/ml/deletejob/delete_job.go old mode 100755 new mode 100644 index 3e0bc80157..e919b8bf82 --- a/typedapi/ml/deletejob/delete_job.go +++ b/typedapi/ml/deletejob/delete_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing anomaly detection job. package deletejob @@ -171,7 +171,6 @@ func (r DeleteJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletejob/response.go b/typedapi/ml/deletejob/response.go old mode 100755 new mode 100644 index df21ec3e8a..9482f51032 --- a/typedapi/ml/deletejob/response.go +++ b/typedapi/ml/deletejob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletejob // Response holds the response body struct for the package deletejob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_job/MlDeleteJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_job/MlDeleteJobResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go b/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go old mode 100755 new mode 100644 index 29385321e5..55b78d31c2 --- a/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go +++ b/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing model snapshot. package deletemodelsnapshot @@ -180,7 +180,6 @@ func (r DeleteModelSnapshot) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletemodelsnapshot/response.go b/typedapi/ml/deletemodelsnapshot/response.go old mode 100755 new mode 100644 index ebbad9c74f..d82ab75fa7 --- a/typedapi/ml/deletemodelsnapshot/response.go +++ b/typedapi/ml/deletemodelsnapshot/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletemodelsnapshot // Response holds the response body struct for the package deletemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_model_snapshot/MlDeleteModelSnapshotResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_model_snapshot/MlDeleteModelSnapshotResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletetrainedmodel/delete_trained_model.go b/typedapi/ml/deletetrainedmodel/delete_trained_model.go old mode 100755 new mode 100644 index 8f9f3f965c..06affb3a02 --- a/typedapi/ml/deletetrainedmodel/delete_trained_model.go +++ b/typedapi/ml/deletetrainedmodel/delete_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing trained inference model that is currently not referenced // by an ingest pipeline. @@ -173,7 +173,6 @@ func (r DeleteTrainedModel) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletetrainedmodel/response.go b/typedapi/ml/deletetrainedmodel/response.go old mode 100755 new mode 100644 index 88dc549aff..68d30d0f13 --- a/typedapi/ml/deletetrainedmodel/response.go +++ b/typedapi/ml/deletetrainedmodel/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletetrainedmodel // Response holds the response body struct for the package deletetrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_trained_model/MlDeleteTrainedModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_trained_model/MlDeleteTrainedModelResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go b/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go old mode 100755 new mode 100644 index 10dde8a83e..1ae1d0ddec --- a/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go +++ b/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a model alias that refers to the trained model package deletetrainedmodelalias @@ -186,7 +186,6 @@ func (r DeleteTrainedModelAlias) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/deletetrainedmodelalias/response.go b/typedapi/ml/deletetrainedmodelalias/response.go old mode 100755 new mode 100644 index c0a3f343da..7f3a78b098 --- a/typedapi/ml/deletetrainedmodelalias/response.go +++ b/typedapi/ml/deletetrainedmodelalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletetrainedmodelalias // Response holds the response body struct for the package deletetrainedmodelalias // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/delete_trained_model_alias/MlDeleteTrainedModelAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/delete_trained_model_alias/MlDeleteTrainedModelAliasResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/estimatemodelmemory/estimate_model_memory.go b/typedapi/ml/estimatemodelmemory/estimate_model_memory.go old mode 100755 new mode 100644 index 8b31d2bea0..989576b24f --- a/typedapi/ml/estimatemodelmemory/estimate_model_memory.go +++ b/typedapi/ml/estimatemodelmemory/estimate_model_memory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Estimates the model memory package estimatemodelmemory @@ -196,7 +196,6 @@ func (r EstimateModelMemory) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/estimatemodelmemory/request.go b/typedapi/ml/estimatemodelmemory/request.go old mode 100755 new mode 100644 index f8c6f55c68..6a76f3819a --- a/typedapi/ml/estimatemodelmemory/request.go +++ b/typedapi/ml/estimatemodelmemory/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package estimatemodelmemory @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package estimatemodelmemory // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/estimate_model_memory/MlEstimateModelMemoryRequest.ts#L26-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/estimate_model_memory/MlEstimateModelMemoryRequest.ts#L26-L61 type Request struct { // AnalysisConfig For a list of the properties that you can specify in the diff --git a/typedapi/ml/estimatemodelmemory/response.go b/typedapi/ml/estimatemodelmemory/response.go old mode 100755 new mode 100644 index 8021662c08..c6f2dd12bf --- a/typedapi/ml/estimatemodelmemory/response.go +++ b/typedapi/ml/estimatemodelmemory/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package estimatemodelmemory // Response holds the response body struct for the package estimatemodelmemory // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/estimate_model_memory/MlEstimateModelMemoryResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/estimate_model_memory/MlEstimateModelMemoryResponse.ts#L20-L24 type Response struct { ModelMemoryEstimate string `json:"model_memory_estimate"` diff --git a/typedapi/ml/evaluatedataframe/evaluate_data_frame.go b/typedapi/ml/evaluatedataframe/evaluate_data_frame.go old mode 100755 new mode 100644 index aacbb8f618..af3bb8f239 --- a/typedapi/ml/evaluatedataframe/evaluate_data_frame.go +++ b/typedapi/ml/evaluatedataframe/evaluate_data_frame.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Evaluates the data frame analytics for an annotated index. 
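To show how the estimate_model_memory request/response pair above is meant to be driven, a sketch that sends a Request through the generated builder; AnalysisConfig is the field you would normally populate (per the Request doc comment above), ModelMemoryEstimate comes from the response struct above, and es is assumed to be a *elasticsearch.TypedClient as in the earlier sketch.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/estimatemodelmemory"
)

// estimateMemory sketches the estimate_model_memory call.
func estimateMemory(ctx context.Context, es *elasticsearch.TypedClient) error {
	// The generated Request mirrors MlEstimateModelMemoryRequest; populate
	// AnalysisConfig (and, if needed, the cardinality fields) before sending.
	// Left empty here so the sketch stays compilable without guessing field shapes.
	req := &estimatemodelmemory.Request{}

	res, err := es.Ml.EstimateModelMemory().Request(req).Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("model_memory_estimate:", res.ModelMemoryEstimate)
	return nil
}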
package evaluatedataframe @@ -196,7 +196,6 @@ func (r EvaluateDataFrame) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/evaluatedataframe/request.go b/typedapi/ml/evaluatedataframe/request.go old mode 100755 new mode 100644 index 11347b223e..b45b4bf6fd --- a/typedapi/ml/evaluatedataframe/request.go +++ b/typedapi/ml/evaluatedataframe/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package evaluatedataframe @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package evaluatedataframe // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/MlEvaluateDataFrameRequest.ts#L25-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/MlEvaluateDataFrameRequest.ts#L25-L52 type Request struct { // Evaluation Defines the type of evaluation you want to perform. diff --git a/typedapi/ml/evaluatedataframe/response.go b/typedapi/ml/evaluatedataframe/response.go old mode 100755 new mode 100644 index b741d5934c..b289de6909 --- a/typedapi/ml/evaluatedataframe/response.go +++ b/typedapi/ml/evaluatedataframe/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package evaluatedataframe @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package evaluatedataframe // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/MlEvaluateDataFrameResponse.ts#L26-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/MlEvaluateDataFrameResponse.ts#L26-L33 type Response struct { Classification *types.DataframeClassificationSummary `json:"classification,omitempty"` diff --git a/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go b/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go old mode 100755 new mode 100644 index 1c7aa6bd4d..200819e7c8 --- a/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go +++ b/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Explains a data frame analytics config. 
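All of the Do helpers in these hunks funnel non-2xx replies through types.NewElasticsearchError(), so callers can branch on the typed error value. A sketch of that, assuming the error returned by Do is (or wraps) a *types.ElasticsearchError and that Status and ErrorCause.Type are the generated field names.

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// explainOrReport shows one way to inspect failures from the generated Do helpers.
func explainOrReport(ctx context.Context, es *elasticsearch.TypedClient) {
	_, err := es.Ml.ExplainDataFrameAnalytics().Do(ctx)
	if err != nil {
		var esErr *types.ElasticsearchError
		if errors.As(err, &esErr) {
			// Status and ErrorCause are assumed field names on the generated error type.
			fmt.Printf("elasticsearch error: status=%d type=%s\n", esErr.Status, esErr.ErrorCause.Type)
			return
		}
		fmt.Println("transport error:", err)
	}
}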
package explaindataframeanalytics @@ -218,7 +218,6 @@ func (r ExplainDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/explaindataframeanalytics/request.go b/typedapi/ml/explaindataframeanalytics/request.go old mode 100755 new mode 100644 index 1b5e6b9e4c..17124dff65 --- a/typedapi/ml/explaindataframeanalytics/request.go +++ b/typedapi/ml/explaindataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explaindataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explaindataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsRequest.ts#L30-L107 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsRequest.ts#L30-L107 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine diff --git a/typedapi/ml/explaindataframeanalytics/response.go b/typedapi/ml/explaindataframeanalytics/response.go old mode 100755 new mode 100644 index d4b6e44694..c928f9736f --- a/typedapi/ml/explaindataframeanalytics/response.go +++ b/typedapi/ml/explaindataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package explaindataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explaindataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsResponse.ts#L25-L32 type Response struct { diff --git a/typedapi/ml/flushjob/flush_job.go b/typedapi/ml/flushjob/flush_job.go old mode 100755 new mode 100644 index 0205c6aa72..154eaaac4d --- a/typedapi/ml/flushjob/flush_job.go +++ b/typedapi/ml/flushjob/flush_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Forces any buffered data to be processed by the job. 
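A minimal sketch of the flush_job call whose Do helper is touched below; the job id is a placeholder and Flushed matches the flushjob response struct in the following hunk.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// flushJob forces buffered data for one anomaly detection job to be processed.
func flushJob(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Ml.FlushJob("my-job").Do(ctx) // "my-job" is a placeholder id
	if err != nil {
		return err
	}
	fmt.Println("flushed:", res.Flushed)
	return nil
}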
package flushjob @@ -208,7 +208,6 @@ func (r FlushJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/flushjob/request.go b/typedapi/ml/flushjob/request.go old mode 100755 new mode 100644 index 0ba1cb55ce..01d5f81671 --- a/typedapi/ml/flushjob/request.go +++ b/typedapi/ml/flushjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package flushjob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package flushjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/flush_job/MlFlushJobRequest.ts#L24-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/flush_job/MlFlushJobRequest.ts#L24-L99 type Request struct { // AdvanceTime Refer to the description for the `advance_time` query parameter. diff --git a/typedapi/ml/flushjob/response.go b/typedapi/ml/flushjob/response.go old mode 100755 new mode 100644 index a6cd76f9ec..a74b1b694f --- a/typedapi/ml/flushjob/response.go +++ b/typedapi/ml/flushjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package flushjob // Response holds the response body struct for the package flushjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/flush_job/MlFlushJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/flush_job/MlFlushJobResponse.ts#L22-L31 type Response struct { Flushed bool `json:"flushed"` diff --git a/typedapi/ml/forecast/forecast.go b/typedapi/ml/forecast/forecast.go old mode 100755 new mode 100644 index c857dde209..abe881e848 --- a/typedapi/ml/forecast/forecast.go +++ b/typedapi/ml/forecast/forecast.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Predicts the future behavior of a time series by using its historical // behavior. @@ -209,7 +209,6 @@ func (r Forecast) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/forecast/request.go b/typedapi/ml/forecast/request.go old mode 100755 new mode 100644 index 43c601d5ae..e869baf91c --- a/typedapi/ml/forecast/request.go +++ b/typedapi/ml/forecast/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package forecast @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package forecast // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/forecast/MlForecastJobRequest.ts#L24-L87 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/forecast/MlForecastJobRequest.ts#L24-L87 type Request struct { // Duration Refer to the description for the `duration` query parameter. diff --git a/typedapi/ml/forecast/response.go b/typedapi/ml/forecast/response.go old mode 100755 new mode 100644 index df72ef301c..d080b23820 --- a/typedapi/ml/forecast/response.go +++ b/typedapi/ml/forecast/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package forecast // Response holds the response body struct for the package forecast // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/forecast/MlForecastJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/forecast/MlForecastJobResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/ml/getbuckets/get_buckets.go b/typedapi/ml/getbuckets/get_buckets.go old mode 100755 new mode 100644 index a17f9bd688..161add476d --- a/typedapi/ml/getbuckets/get_buckets.go +++ b/typedapi/ml/getbuckets/get_buckets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves anomaly detection job results for one or more buckets. package getbuckets @@ -230,7 +230,6 @@ func (r GetBuckets) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getbuckets/request.go b/typedapi/ml/getbuckets/request.go old mode 100755 new mode 100644 index 32ac27bc5a..b41a8dead0 --- a/typedapi/ml/getbuckets/request.go +++ b/typedapi/ml/getbuckets/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getbuckets @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_buckets/MlGetBucketsRequest.ts#L26-L133 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_buckets/MlGetBucketsRequest.ts#L26-L133 type Request struct { // AnomalyScore Refer to the description for the `anomaly_score` query parameter. diff --git a/typedapi/ml/getbuckets/response.go b/typedapi/ml/getbuckets/response.go old mode 100755 new mode 100644 index b654faa409..33fbf6375f --- a/typedapi/ml/getbuckets/response.go +++ b/typedapi/ml/getbuckets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getbuckets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_buckets/MlGetBucketsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_buckets/MlGetBucketsResponse.ts#L23-L28 type Response struct { Buckets []types.BucketSummary `json:"buckets"` diff --git a/typedapi/ml/getcalendarevents/get_calendar_events.go b/typedapi/ml/getcalendarevents/get_calendar_events.go old mode 100755 new mode 100644 index edeabb12bb..f02426c16c --- a/typedapi/ml/getcalendarevents/get_calendar_events.go +++ b/typedapi/ml/getcalendarevents/get_calendar_events.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about the scheduled events in calendars. package getcalendarevents @@ -173,7 +173,6 @@ func (r GetCalendarEvents) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getcalendarevents/response.go b/typedapi/ml/getcalendarevents/response.go old mode 100755 new mode 100644 index 3bde4a4c17..ee121d7e3f --- a/typedapi/ml/getcalendarevents/response.go +++ b/typedapi/ml/getcalendarevents/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getcalendarevents @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_calendar_events/MlGetCalendarEventsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_calendar_events/MlGetCalendarEventsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getcalendars/get_calendars.go b/typedapi/ml/getcalendars/get_calendars.go old mode 100755 new mode 100644 index c75820f9af..f7afd5d782 --- a/typedapi/ml/getcalendars/get_calendars.go +++ b/typedapi/ml/getcalendars/get_calendars.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves configuration information for calendars. package getcalendars @@ -211,7 +211,6 @@ func (r GetCalendars) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getcalendars/request.go b/typedapi/ml/getcalendars/request.go old mode 100755 new mode 100644 index 51f105920d..62b9d76da5 --- a/typedapi/ml/getcalendars/request.go +++ b/typedapi/ml/getcalendars/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getcalendars @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getcalendars // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_calendars/MlGetCalendarsRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_calendars/MlGetCalendarsRequest.ts#L25-L51 type Request struct { // Page This object is supported only when you omit the calendar identifier. diff --git a/typedapi/ml/getcalendars/response.go b/typedapi/ml/getcalendars/response.go old mode 100755 new mode 100644 index 7bd259ee32..ffc0c64d1d --- a/typedapi/ml/getcalendars/response.go +++ b/typedapi/ml/getcalendars/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getcalendars @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcalendars // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_calendars/MlGetCalendarsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_calendars/MlGetCalendarsResponse.ts#L23-L25 type Response struct { Calendars []types.Calendar `json:"calendars"` diff --git a/typedapi/ml/getcategories/get_categories.go b/typedapi/ml/getcategories/get_categories.go old mode 100755 new mode 100644 index 2b75038007..683ac1f684 --- a/typedapi/ml/getcategories/get_categories.go +++ b/typedapi/ml/getcategories/get_categories.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves anomaly detection job results for one or more categories. package getcategories @@ -161,7 +161,7 @@ func (r *GetCategories) HttpRequest(ctx context.Context) (*http.Request, error) path.WriteString("results") path.WriteString("/") path.WriteString("categories") - path.WriteString("/") + method = http.MethodPost } @@ -230,7 +230,6 @@ func (r GetCategories) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getcategories/request.go b/typedapi/ml/getcategories/request.go old mode 100755 new mode 100644 index ad3d7a5b74..d128b0b4a2 --- a/typedapi/ml/getcategories/request.go +++ b/typedapi/ml/getcategories/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getcategories @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getcategories // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_categories/MlGetCategoriesRequest.ts#L25-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_categories/MlGetCategoriesRequest.ts#L25-L66 type Request struct { Page *types.Page `json:"page,omitempty"` } diff --git a/typedapi/ml/getcategories/response.go b/typedapi/ml/getcategories/response.go old mode 100755 new mode 100644 index 43b6c9e833..5eab0c7dac --- a/typedapi/ml/getcategories/response.go +++ b/typedapi/ml/getcategories/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getcategories @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcategories // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_categories/MlGetCategoriesResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_categories/MlGetCategoriesResponse.ts#L23-L28 type Response struct { Categories []types.Category `json:"categories"` diff --git a/typedapi/ml/getdatafeeds/get_datafeeds.go b/typedapi/ml/getdatafeeds/get_datafeeds.go old mode 100755 new mode 100644 index 8013adea37..88cf068e6c --- a/typedapi/ml/getdatafeeds/get_datafeeds.go +++ b/typedapi/ml/getdatafeeds/get_datafeeds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves configuration information for datafeeds. package getdatafeeds @@ -176,7 +176,6 @@ func (r GetDatafeeds) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getdatafeeds/response.go b/typedapi/ml/getdatafeeds/response.go old mode 100755 new mode 100644 index ca10f16187..bb412caeae --- a/typedapi/ml/getdatafeeds/response.go +++ b/typedapi/ml/getdatafeeds/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getdatafeeds @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatafeeds // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_datafeeds/MlGetDatafeedsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_datafeeds/MlGetDatafeedsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getdatafeedstats/get_datafeed_stats.go b/typedapi/ml/getdatafeedstats/get_datafeed_stats.go old mode 100755 new mode 100644 index 07c4223d57..643e497756 --- a/typedapi/ml/getdatafeedstats/get_datafeed_stats.go +++ b/typedapi/ml/getdatafeedstats/get_datafeed_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves usage information for datafeeds. 
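The get_categories hunk above drops the trailing path separator and switches the no-category-id form to http.MethodPost, the usual choice when an optional body such as page is sent. A sketch of a paged call, assuming types.Page exposes From/Size pointer fields; the job id and page values are placeholders.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getcategories"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// listCategories pages through category results for one job.
func listCategories(ctx context.Context, es *elasticsearch.TypedClient) error {
	from, size := 0, 50 // placeholder paging values
	res, err := es.Ml.GetCategories("my-job").
		Request(&getcategories.Request{Page: &types.Page{From: &from, Size: &size}}).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("returned %d of %d categories\n", len(res.Categories), res.Count)
	return nil
}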
package getdatafeedstats @@ -180,7 +180,6 @@ func (r GetDatafeedStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getdatafeedstats/response.go b/typedapi/ml/getdatafeedstats/response.go old mode 100755 new mode 100644 index 8b4eff37ff..499ac32ee1 --- a/typedapi/ml/getdatafeedstats/response.go +++ b/typedapi/ml/getdatafeedstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getdatafeedstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatafeedstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_datafeed_stats/MlGetDatafeedStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_datafeed_stats/MlGetDatafeedStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go b/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go old mode 100755 new mode 100644 index adc16f0ce0..4c35dca050 --- a/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go +++ b/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves configuration information for data frame analytics jobs. package getdataframeanalytics @@ -180,7 +180,6 @@ func (r GetDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getdataframeanalytics/response.go b/typedapi/ml/getdataframeanalytics/response.go old mode 100755 new mode 100644 index f1498df6e1..6235b95f20 --- a/typedapi/ml/getdataframeanalytics/response.go +++ b/typedapi/ml/getdataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getdataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_data_frame_analytics/MlGetDataFrameAnalyticsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_data_frame_analytics/MlGetDataFrameAnalyticsResponse.ts#L23-L29 type Response struct { Count int `json:"count"` diff --git a/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go b/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go old mode 100755 new mode 100644 index b1d4656274..de9bf6b329 --- a/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go +++ b/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves usage information for data frame analytics jobs. package getdataframeanalyticsstats @@ -184,7 +184,6 @@ func (r GetDataFrameAnalyticsStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getdataframeanalyticsstats/response.go b/typedapi/ml/getdataframeanalyticsstats/response.go old mode 100755 new mode 100644 index f6a216dd2a..83e5a4f3b0 --- a/typedapi/ml/getdataframeanalyticsstats/response.go +++ b/typedapi/ml/getdataframeanalyticsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getdataframeanalyticsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdataframeanalyticsstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_data_frame_analytics_stats/MlGetDataFrameAnalyticsStatsResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_data_frame_analytics_stats/MlGetDataFrameAnalyticsStatsResponse.ts#L24-L30 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getfilters/get_filters.go b/typedapi/ml/getfilters/get_filters.go old mode 100755 new mode 100644 index a0db72d9af..a4bc43646e --- a/typedapi/ml/getfilters/get_filters.go +++ b/typedapi/ml/getfilters/get_filters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves filters. 
package getfilters @@ -176,7 +176,6 @@ func (r GetFilters) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getfilters/response.go b/typedapi/ml/getfilters/response.go old mode 100755 new mode 100644 index 3832c3917e..6667dc6570 --- a/typedapi/ml/getfilters/response.go +++ b/typedapi/ml/getfilters/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getfilters @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfilters // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_filters/MlGetFiltersResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_filters/MlGetFiltersResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getinfluencers/get_influencers.go b/typedapi/ml/getinfluencers/get_influencers.go old mode 100755 new mode 100644 index a11d4d8ae3..a03292f008 --- a/typedapi/ml/getinfluencers/get_influencers.go +++ b/typedapi/ml/getinfluencers/get_influencers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves anomaly detection job results for one or more influencers. package getinfluencers @@ -210,7 +210,6 @@ func (r GetInfluencers) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getinfluencers/request.go b/typedapi/ml/getinfluencers/request.go old mode 100755 new mode 100644 index 56241ae884..f6e1ee60ee --- a/typedapi/ml/getinfluencers/request.go +++ b/typedapi/ml/getinfluencers/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getinfluencers @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getinfluencers // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_influencers/MlGetInfluencersRequest.ts#L26-L93 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_influencers/MlGetInfluencersRequest.ts#L26-L93 type Request struct { Page *types.Page `json:"page,omitempty"` } diff --git a/typedapi/ml/getinfluencers/response.go b/typedapi/ml/getinfluencers/response.go old mode 100755 new mode 100644 index cafdf61463..837a80ab78 --- a/typedapi/ml/getinfluencers/response.go +++ b/typedapi/ml/getinfluencers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getinfluencers @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getinfluencers // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_influencers/MlGetInfluencersResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_influencers/MlGetInfluencersResponse.ts#L23-L29 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getjobs/get_jobs.go b/typedapi/ml/getjobs/get_jobs.go old mode 100755 new mode 100644 index da6132eace..d0a29a9602 --- a/typedapi/ml/getjobs/get_jobs.go +++ b/typedapi/ml/getjobs/get_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves configuration information for anomaly detection jobs. package getjobs @@ -176,7 +176,6 @@ func (r GetJobs) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getjobs/response.go b/typedapi/ml/getjobs/response.go old mode 100755 new mode 100644 index a71c3356d2..e071795e03 --- a/typedapi/ml/getjobs/response.go +++ b/typedapi/ml/getjobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getjobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobs // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_jobs/MlGetJobsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_jobs/MlGetJobsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getjobstats/get_job_stats.go b/typedapi/ml/getjobstats/get_job_stats.go old mode 100755 new mode 100644 index c4f9e8ae36..f15cfd6cb8 --- a/typedapi/ml/getjobstats/get_job_stats.go +++ b/typedapi/ml/getjobstats/get_job_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves usage information for anomaly detection jobs. 
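Since get_jobs returns configuration documents and get_job_stats returns usage counters, callers typically pair them. A short sketch, assuming both builders take no required arguments (job ids are optional on these endpoints), with Count taken from the response structs in the surrounding hunks.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// jobOverview pairs the configuration listing with the usage-stats listing.
func jobOverview(ctx context.Context, es *elasticsearch.TypedClient) error {
	jobs, err := es.Ml.GetJobs().Do(ctx)
	if err != nil {
		return err
	}
	stats, err := es.Ml.GetJobStats().Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("%d job configs, %d job stats documents\n", jobs.Count, stats.Count)
	return nil
}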
package getjobstats @@ -180,7 +180,6 @@ func (r GetJobStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getjobstats/response.go b/typedapi/ml/getjobstats/response.go old mode 100755 new mode 100644 index daeaeaba75..af55c95b64 --- a/typedapi/ml/getjobstats/response.go +++ b/typedapi/ml/getjobstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getjobstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_job_stats/MlGetJobStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_job_stats/MlGetJobStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getmemorystats/get_memory_stats.go b/typedapi/ml/getmemorystats/get_memory_stats.go old mode 100755 new mode 100644 index 05916bae22..e2e2d9e383 --- a/typedapi/ml/getmemorystats/get_memory_stats.go +++ b/typedapi/ml/getmemorystats/get_memory_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information on how ML is using memory. package getmemorystats @@ -180,7 +180,6 @@ func (r GetMemoryStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getmemorystats/response.go b/typedapi/ml/getmemorystats/response.go old mode 100755 new mode 100644 index d3cfc83658..0dadc1ae33 --- a/typedapi/ml/getmemorystats/response.go +++ b/typedapi/ml/getmemorystats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getmemorystats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmemorystats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_memory_stats/MlGetMemoryStatsResponse.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_memory_stats/MlGetMemoryStatsResponse.ts#L25-L31 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/ml/getmodelsnapshots/get_model_snapshots.go b/typedapi/ml/getmodelsnapshots/get_model_snapshots.go old mode 100755 new mode 100644 index 52019354dc..e59648adad --- a/typedapi/ml/getmodelsnapshots/get_model_snapshots.go +++ b/typedapi/ml/getmodelsnapshots/get_model_snapshots.go @@ -16,7 +16,7 @@ // under the License. 
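For the regenerated get_memory_stats endpoint above, a cluster-wide sketch (no node id); ClusterName comes from the response struct in the hunk, and the rest is illustrative.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// memoryStats reports how ML is using memory across the cluster.
func memoryStats(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Ml.GetMemoryStats().Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster:", res.ClusterName)
	return nil
}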
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about model snapshots. package getmodelsnapshots @@ -226,7 +226,6 @@ func (r GetModelSnapshots) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getmodelsnapshots/request.go b/typedapi/ml/getmodelsnapshots/request.go old mode 100755 new mode 100644 index c464b2df41..ad5721356c --- a/typedapi/ml/getmodelsnapshots/request.go +++ b/typedapi/ml/getmodelsnapshots/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getmodelsnapshots @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getmodelsnapshots // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_model_snapshots/MlGetModelSnapshotsRequest.ts#L26-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_model_snapshots/MlGetModelSnapshotsRequest.ts#L26-L96 type Request struct { // Desc Refer to the description for the `desc` query parameter. diff --git a/typedapi/ml/getmodelsnapshots/response.go b/typedapi/ml/getmodelsnapshots/response.go old mode 100755 new mode 100644 index 9157ca36d7..3fc22fc0ef --- a/typedapi/ml/getmodelsnapshots/response.go +++ b/typedapi/ml/getmodelsnapshots/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getmodelsnapshots @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodelsnapshots // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_model_snapshots/MlGetModelSnapshotsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_model_snapshots/MlGetModelSnapshotsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go b/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go old mode 100755 new mode 100644 index da204765eb..d6d0c88428 --- a/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go +++ b/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Gets stats for anomaly detection job model snapshot upgrades that are in // progress. @@ -187,7 +187,6 @@ func (r GetModelSnapshotUpgradeStats) Do(ctx context.Context) (*Response, error) } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getmodelsnapshotupgradestats/response.go b/typedapi/ml/getmodelsnapshotupgradestats/response.go old mode 100755 new mode 100644 index 13572939b8..00deb842f4 --- a/typedapi/ml/getmodelsnapshotupgradestats/response.go +++ b/typedapi/ml/getmodelsnapshotupgradestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getmodelsnapshotupgradestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodelsnapshotupgradestats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_model_snapshot_upgrade_stats/MlGetModelSnapshotUpgradeStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_model_snapshot_upgrade_stats/MlGetModelSnapshotUpgradeStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getoverallbuckets/get_overall_buckets.go b/typedapi/ml/getoverallbuckets/get_overall_buckets.go old mode 100755 new mode 100644 index 85c8488696..387283bb38 --- a/typedapi/ml/getoverallbuckets/get_overall_buckets.go +++ b/typedapi/ml/getoverallbuckets/get_overall_buckets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves overall bucket results that summarize the bucket results of // multiple anomaly detection jobs. @@ -212,7 +212,6 @@ func (r GetOverallBuckets) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getoverallbuckets/request.go b/typedapi/ml/getoverallbuckets/request.go old mode 100755 new mode 100644 index ab68fb3ce3..0c91822278 --- a/typedapi/ml/getoverallbuckets/request.go +++ b/typedapi/ml/getoverallbuckets/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getoverallbuckets @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getoverallbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_overall_buckets/MlGetOverallBucketsRequest.ts#L25-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_overall_buckets/MlGetOverallBucketsRequest.ts#L25-L143 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. diff --git a/typedapi/ml/getoverallbuckets/response.go b/typedapi/ml/getoverallbuckets/response.go old mode 100755 new mode 100644 index 00c9af8a35..0263c6fb5b --- a/typedapi/ml/getoverallbuckets/response.go +++ b/typedapi/ml/getoverallbuckets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getoverallbuckets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getoverallbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_overall_buckets/MlGetOverallBucketsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_overall_buckets/MlGetOverallBucketsResponse.ts#L23-L29 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/getrecords/get_records.go b/typedapi/ml/getrecords/get_records.go old mode 100755 new mode 100644 index 79b7a23b2c..037c3f1faf --- a/typedapi/ml/getrecords/get_records.go +++ b/typedapi/ml/getrecords/get_records.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves anomaly records for an anomaly detection job. package getrecords @@ -210,7 +210,6 @@ func (r GetRecords) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/getrecords/request.go b/typedapi/ml/getrecords/request.go old mode 100755 new mode 100644 index 1c2592db1a..89f9d764d8 --- a/typedapi/ml/getrecords/request.go +++ b/typedapi/ml/getrecords/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrecords @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getrecords // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_records/MlGetAnomalyRecordsRequest.ts#L26-L127 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_records/MlGetAnomalyRecordsRequest.ts#L26-L127 type Request struct { // Desc Refer to the description for the `desc` query parameter. diff --git a/typedapi/ml/getrecords/response.go b/typedapi/ml/getrecords/response.go old mode 100755 new mode 100644 index d1b406c19b..795d334c16 --- a/typedapi/ml/getrecords/response.go +++ b/typedapi/ml/getrecords/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrecords @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrecords // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_records/MlGetAnomalyRecordsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_records/MlGetAnomalyRecordsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/ml/gettrainedmodels/get_trained_models.go b/typedapi/ml/gettrainedmodels/get_trained_models.go old mode 100755 new mode 100644 index 6783095684..0506f01a32 --- a/typedapi/ml/gettrainedmodels/get_trained_models.go +++ b/typedapi/ml/gettrainedmodels/get_trained_models.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves configuration information for a trained inference model. package gettrainedmodels @@ -178,7 +178,6 @@ func (r GetTrainedModels) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/gettrainedmodels/response.go b/typedapi/ml/gettrainedmodels/response.go old mode 100755 new mode 100644 index a1fb931a55..d451cd482e --- a/typedapi/ml/gettrainedmodels/response.go +++ b/typedapi/ml/gettrainedmodels/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettrainedmodels @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettrainedmodels // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_trained_models/MlGetTrainedModelResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_trained_models/MlGetTrainedModelResponse.ts#L23-L34 type Response struct { Count int `json:"count"` diff --git a/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go b/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go old mode 100755 new mode 100644 index d6dba4e8a4..416aab20df --- a/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go +++ b/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves usage information for trained inference models. package gettrainedmodelsstats @@ -180,7 +180,6 @@ func (r GetTrainedModelsStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/gettrainedmodelsstats/response.go b/typedapi/ml/gettrainedmodelsstats/response.go old mode 100755 new mode 100644 index 488324ae30..aab606504d --- a/typedapi/ml/gettrainedmodelsstats/response.go +++ b/typedapi/ml/gettrainedmodelsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettrainedmodelsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettrainedmodelsstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_trained_models_stats/MlGetTrainedModelStatsResponse.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_trained_models_stats/MlGetTrainedModelStatsResponse.ts#L23-L33 type Response struct { diff --git a/typedapi/ml/infertrainedmodel/infer_trained_model.go b/typedapi/ml/infertrainedmodel/infer_trained_model.go old mode 100755 new mode 100644 index 452837216a..46701d759b --- a/typedapi/ml/infertrainedmodel/infer_trained_model.go +++ b/typedapi/ml/infertrainedmodel/infer_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Evaluate a trained model. 
package infertrainedmodel @@ -221,7 +221,6 @@ func (r InferTrainedModel) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/infertrainedmodel/request.go b/typedapi/ml/infertrainedmodel/request.go old mode 100755 new mode 100644 index eac2fd71ff..a6ca2635c8 --- a/typedapi/ml/infertrainedmodel/request.go +++ b/typedapi/ml/infertrainedmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package infertrainedmodel @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package infertrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/infer_trained_model/MlInferTrainedModelRequest.ts#L27-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/infer_trained_model/MlInferTrainedModelRequest.ts#L27-L59 type Request struct { // Docs An array of objects to pass to the model for inference. The objects should diff --git a/typedapi/ml/infertrainedmodel/response.go b/typedapi/ml/infertrainedmodel/response.go old mode 100755 new mode 100644 index a85b14f22d..c9a6252fda --- a/typedapi/ml/infertrainedmodel/response.go +++ b/typedapi/ml/infertrainedmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package infertrainedmodel @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package infertrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/infer_trained_model/MlInferTrainedModelResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/infer_trained_model/MlInferTrainedModelResponse.ts#L22-L26 type Response struct { InferenceResults []types.InferenceResponseResult `json:"inference_results"` diff --git a/typedapi/ml/info/info.go b/typedapi/ml/info/info.go old mode 100755 new mode 100644 index 9b3144e778..f83226d898 --- a/typedapi/ml/info/info.go +++ b/typedapi/ml/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns defaults and limits used by machine learning. package info @@ -159,7 +159,6 @@ func (r Info) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/info/response.go b/typedapi/ml/info/response.go old mode 100755 new mode 100644 index 7ac66bff2a..6d8230ea15 --- a/typedapi/ml/info/response.go +++ b/typedapi/ml/info/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/info/MlInfoResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/info/MlInfoResponse.ts#L22-L29 type Response struct { Defaults types.Defaults `json:"defaults"` diff --git a/typedapi/ml/openjob/open_job.go b/typedapi/ml/openjob/open_job.go old mode 100755 new mode 100644 index 3204a95669..597fecb5af --- a/typedapi/ml/openjob/open_job.go +++ b/typedapi/ml/openjob/open_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Opens one or more anomaly detection jobs. package openjob @@ -207,7 +207,6 @@ func (r OpenJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/openjob/request.go b/typedapi/ml/openjob/request.go old mode 100755 new mode 100644 index 9c374d2d26..aae90c3446 --- a/typedapi/ml/openjob/request.go +++ b/typedapi/ml/openjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package openjob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package openjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/open_job/MlOpenJobRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/open_job/MlOpenJobRequest.ts#L24-L59 type Request struct { // Timeout Refer to the description for the `timeout` query parameter. diff --git a/typedapi/ml/openjob/response.go b/typedapi/ml/openjob/response.go old mode 100755 new mode 100644 index 02222385cb..c36dbbca71 --- a/typedapi/ml/openjob/response.go +++ b/typedapi/ml/openjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package openjob // Response holds the response body struct for the package openjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/open_job/MlOpenJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/open_job/MlOpenJobResponse.ts#L22-L27 type Response struct { Node string `json:"node"` diff --git a/typedapi/ml/postcalendarevents/post_calendar_events.go b/typedapi/ml/postcalendarevents/post_calendar_events.go old mode 100755 new mode 100644 index bcfa8663a7..f6171ca45f --- a/typedapi/ml/postcalendarevents/post_calendar_events.go +++ b/typedapi/ml/postcalendarevents/post_calendar_events.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Posts scheduled events in a calendar. package postcalendarevents @@ -207,7 +207,6 @@ func (r PostCalendarEvents) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/postcalendarevents/request.go b/typedapi/ml/postcalendarevents/request.go old mode 100755 new mode 100644 index 0b28be1866..dc09703862 --- a/typedapi/ml/postcalendarevents/request.go +++ b/typedapi/ml/postcalendarevents/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package postcalendarevents @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package postcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/post_calendar_events/MlPostCalendarEventsRequest.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/post_calendar_events/MlPostCalendarEventsRequest.ts#L24-L40 type Request struct { // Events A list of one of more scheduled events. The event’s start and end times can diff --git a/typedapi/ml/postcalendarevents/response.go b/typedapi/ml/postcalendarevents/response.go old mode 100755 new mode 100644 index 9ded5e0506..e60c6b18d1 --- a/typedapi/ml/postcalendarevents/response.go +++ b/typedapi/ml/postcalendarevents/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package postcalendarevents @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package postcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/post_calendar_events/MlPostCalendarEventsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/post_calendar_events/MlPostCalendarEventsResponse.ts#L22-L24 type Response struct { Events []types.CalendarEvent `json:"events"` diff --git a/typedapi/ml/previewdatafeed/preview_datafeed.go b/typedapi/ml/previewdatafeed/preview_datafeed.go old mode 100755 new mode 100644 index c20745bc05..f89c305625 --- a/typedapi/ml/previewdatafeed/preview_datafeed.go +++ b/typedapi/ml/previewdatafeed/preview_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Previews a datafeed. package previewdatafeed @@ -197,7 +197,7 @@ func (r PreviewDatafeed) Perform(ctx context.Context) (*http.Response, error) { } // Do runs the request through the transport, handle the response and returns a previewdatafeed.Response -func (r PreviewDatafeed) Do(ctx context.Context) (*Response, error) { +func (r PreviewDatafeed) Do(ctx context.Context) (Response, error) { response := NewResponse() @@ -208,13 +208,12 @@ func (r PreviewDatafeed) Do(ctx context.Context) (*Response, error) { defer res.Body.Close() if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) + err = json.NewDecoder(res.Body).Decode(&response) if err != nil { return nil, err } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/previewdatafeed/request.go b/typedapi/ml/previewdatafeed/request.go old mode 100755 new mode 100644 index c787deacb4..4915c3476f --- a/typedapi/ml/previewdatafeed/request.go +++ b/typedapi/ml/previewdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package previewdatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/preview_datafeed/MlPreviewDatafeedRequest.ts#L26-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/preview_datafeed/MlPreviewDatafeedRequest.ts#L26-L69 type Request struct { // DatafeedConfig The datafeed definition to preview. diff --git a/typedapi/ml/previewdatafeed/response.go b/typedapi/ml/previewdatafeed/response.go old mode 100755 new mode 100644 index 9057d00d57..54e902c407 --- a/typedapi/ml/previewdatafeed/response.go +++ b/typedapi/ml/previewdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package previewdatafeed @@ -24,14 +24,12 @@ import "encoding/json" // Response holds the response body struct for the package previewdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/preview_datafeed/MlPreviewDatafeedResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/preview_datafeed/MlPreviewDatafeedResponse.ts#L20-L22 -type Response struct { - Data []json.RawMessage `json:"data"` -} +type Response []json.RawMessage // NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} +func NewResponse() Response { + r := Response{} return r } diff --git a/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go b/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go old mode 100755 new mode 100644 index 4c7e5a33a4..5476c6eb0d --- a/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go +++ b/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Previews that will be analyzed given a data frame analytics config. package previewdataframeanalytics @@ -218,7 +218,6 @@ func (r PreviewDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/previewdataframeanalytics/request.go b/typedapi/ml/previewdataframeanalytics/request.go old mode 100755 new mode 100644 index 2e90a2d625..04f494f93f --- a/typedapi/ml/previewdataframeanalytics/request.go +++ b/typedapi/ml/previewdataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
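Reviewer note on the previewdatafeed hunks above: `Do` now returns `Response` by value and `Response` is redefined as `[]json.RawMessage` instead of a struct wrapping a `Data` slice. A minimal sketch of the adjustment on the caller side, using a hand-written body in place of a live API call (the transport/client setup is assumed and elided):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/previewdatafeed"
)

func main() {
	// Stand-in for a preview datafeed response body; illustrative data only.
	body := []byte(`[{"airline":"AAL","responsetime":132.2},{"airline":"JBU","responsetime":990.4}]`)

	res := previewdatafeed.NewResponse() // Response is now []json.RawMessage
	if err := json.Unmarshal(body, &res); err != nil {
		panic(err)
	}

	// Previously the preview documents were reached through res.Data;
	// with this change res itself is the slice of raw documents.
	for i, doc := range res {
		fmt.Printf("doc %d: %s\n", i, doc)
	}
}
```

Each raw document can then be decoded into whatever concrete shape the caller expects.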
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package previewdataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsRequest.ts#L24-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsRequest.ts#L24-L47 type Request struct { // Config A data frame analytics config as described in create data frame analytics diff --git a/typedapi/ml/previewdataframeanalytics/response.go b/typedapi/ml/previewdataframeanalytics/response.go old mode 100755 new mode 100644 index 53d71642d0..6ba172b663 --- a/typedapi/ml/previewdataframeanalytics/response.go +++ b/typedapi/ml/previewdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package previewdataframeanalytics // Response holds the response body struct for the package previewdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsResponse.ts#L23-L28 type Response struct { diff --git a/typedapi/ml/putcalendar/put_calendar.go b/typedapi/ml/putcalendar/put_calendar.go old mode 100755 new mode 100644 index 41718c5442..1d3f433ae7 --- a/typedapi/ml/putcalendar/put_calendar.go +++ b/typedapi/ml/putcalendar/put_calendar.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Instantiates a calendar. package putcalendar @@ -205,7 +205,6 @@ func (r PutCalendar) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/putcalendar/request.go b/typedapi/ml/putcalendar/request.go old mode 100755 new mode 100644 index 129d739d02..b7ff0ef489 --- a/typedapi/ml/putcalendar/request.go +++ b/typedapi/ml/putcalendar/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putcalendar @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putcalendar // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_calendar/MlPutCalendarRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_calendar/MlPutCalendarRequest.ts#L23-L43 type Request struct { // Description A description of the calendar. diff --git a/typedapi/ml/putcalendar/response.go b/typedapi/ml/putcalendar/response.go old mode 100755 new mode 100644 index 1fedf82384..4a3186287d --- a/typedapi/ml/putcalendar/response.go +++ b/typedapi/ml/putcalendar/response.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putcalendar +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // Response holds the response body struct for the package putcalendar // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_calendar/MlPutCalendarResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_calendar/MlPutCalendarResponse.ts#L22-L31 type Response struct { @@ -39,3 +46,51 @@ func NewResponse() *Response { r := &Response{} return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "calendar_id": + if err := dec.Decode(&s.CalendarId); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "job_ids": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.JobIds = append(s.JobIds, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.JobIds); err != nil { + return err + } + } + + } + } + return nil +} diff --git a/typedapi/ml/putcalendarjob/put_calendar_job.go b/typedapi/ml/putcalendarjob/put_calendar_job.go old mode 100755 new mode 100644 index 2beb2e16c6..f0546662e4 --- a/typedapi/ml/putcalendarjob/put_calendar_job.go +++ b/typedapi/ml/putcalendarjob/put_calendar_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds an anomaly detection job to a calendar. 
package putcalendarjob @@ -180,7 +180,6 @@ func (r PutCalendarJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/putcalendarjob/response.go b/typedapi/ml/putcalendarjob/response.go old mode 100755 new mode 100644 index 2433e3a3ba..a606022ec2 --- a/typedapi/ml/putcalendarjob/response.go +++ b/typedapi/ml/putcalendarjob/response.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putcalendarjob +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // Response holds the response body struct for the package putcalendarjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_calendar_job/MlPutCalendarJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_calendar_job/MlPutCalendarJobResponse.ts#L22-L31 type Response struct { @@ -39,3 +46,51 @@ func NewResponse() *Response { r := &Response{} return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "calendar_id": + if err := dec.Decode(&s.CalendarId); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "job_ids": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.JobIds = append(s.JobIds, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.JobIds); err != nil { + return err + } + } + + } + } + return nil +} diff --git a/typedapi/ml/putdatafeed/put_datafeed.go b/typedapi/ml/putdatafeed/put_datafeed.go old mode 100755 new mode 100644 index ffd8403b8e..e1c409e637 --- a/typedapi/ml/putdatafeed/put_datafeed.go +++ b/typedapi/ml/putdatafeed/put_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Instantiates a datafeed. package putdatafeed @@ -206,7 +206,6 @@ func (r PutDatafeed) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/putdatafeed/request.go b/typedapi/ml/putdatafeed/request.go old mode 100755 new mode 100644 index 8ae334e20d..360c7c0ad6 --- a/typedapi/ml/putdatafeed/request.go +++ b/typedapi/ml/putdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
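The `UnmarshalJSON` methods added to the putcalendar and putcalendarjob responses above tolerate `job_ids` arriving either as a single string or as an array. A small sketch with hand-written payloads (illustrative values only) showing both forms ending up in the same slice:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putcalendar"
)

func main() {
	payloads := [][]byte{
		// Scalar form: a single job id.
		[]byte(`{"calendar_id":"planned-outages","job_ids":"job-1"}`),
		// Array form: several job ids.
		[]byte(`{"calendar_id":"planned-outages","job_ids":["job-1","job-2"]}`),
	}

	for _, p := range payloads {
		res := putcalendar.NewResponse()
		if err := json.Unmarshal(p, res); err != nil { // uses the generated UnmarshalJSON
			panic(err)
		}
		fmt.Println(res.CalendarId, res.JobIds)
	}
}
```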
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putdatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_datafeed/MlPutDatafeedRequest.ts#L37-L171 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_datafeed/MlPutDatafeedRequest.ts#L37-L171 type Request struct { // Aggregations If set, the datafeed performs aggregation searches. @@ -64,8 +64,8 @@ type Request struct { // results. If the datafeed uses // aggregations, this value must be divisible by the interval of the date // histogram aggregation. - Frequency types.Duration `json:"frequency,omitempty"` - Headers map[string][]string `json:"headers,omitempty"` + Frequency types.Duration `json:"frequency,omitempty"` + Headers types.HttpHeaders `json:"headers,omitempty"` // Indices An array of index names. Wildcards are supported. If any of the indices are // in remote clusters, the machine // learning nodes must have the `remote_cluster_client` role. @@ -98,7 +98,7 @@ type Request struct { // when there are multiple jobs running on the same node. QueryDelay types.Duration `json:"query_delay,omitempty"` // RuntimeMappings Specifies runtime fields for the datafeed search. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // ScriptFields Specifies scripts that evaluate custom expressions and returns script fields // to the datafeed. // The detector configuration objects in a job can contain functions that use diff --git a/typedapi/ml/putdatafeed/response.go b/typedapi/ml/putdatafeed/response.go old mode 100755 new mode 100644 index 0387ea972d..f200f229f1 --- a/typedapi/ml/putdatafeed/response.go +++ b/typedapi/ml/putdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
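The putdatafeed request hunk above replaces the inline `map[string][]string` headers with the named `types.HttpHeaders` (and `map[string]types.RuntimeField` with `types.RuntimeFields`); the same substitution recurs in the response and putdataframeanalytics hunks nearby. Assuming the named types keep the underlying map shapes of the fields they replace, existing callers only change the type name in the literal; a sketch under that assumption, with an illustrative header value:

```go
package main

import (
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putdatafeed"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	req := &putdatafeed.Request{}

	// Previously: req.Headers = map[string][]string{...}
	// Assumption: types.HttpHeaders keeps the map[string][]string shape,
	// so only the type name in the literal changes. The header value below
	// is illustrative, not taken from the diff.
	req.Headers = types.HttpHeaders{
		"es-secondary-authorization": {"ApiKey <redacted>"},
	}

	// req.RuntimeMappings follows the same pattern, with types.RuntimeFields
	// standing in for map[string]types.RuntimeField.
	_ = req
}
```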
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putdatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_datafeed/MlPutDatafeedResponse.ts#L31-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_datafeed/MlPutDatafeedResponse.ts#L31-L49 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` @@ -41,7 +41,7 @@ type Response struct { MaxEmptySearches *int `json:"max_empty_searches,omitempty"` Query types.Query `json:"query"` QueryDelay types.Duration `json:"query_delay"` - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` ScriptFields map[string]types.ScriptField `json:"script_fields,omitempty"` ScrollSize int `json:"scroll_size"` } diff --git a/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go b/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go old mode 100755 new mode 100644 index 0002aff2bc..5912fd38be --- a/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go +++ b/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Instantiates a data frame analytics job. package putdataframeanalytics @@ -207,7 +207,6 @@ func (r PutDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/putdataframeanalytics/request.go b/typedapi/ml/putdataframeanalytics/request.go old mode 100755 new mode 100644 index 03584896e2..a40dbcf696 --- a/typedapi/ml/putdataframeanalytics/request.go +++ b/typedapi/ml/putdataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putdataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsRequest.ts#L30-L139 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsRequest.ts#L30-L139 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine @@ -78,7 +78,7 @@ type Request struct { Description *string `json:"description,omitempty"` // Dest The destination configuration. 
Dest types.DataframeAnalyticsDestination `json:"dest"` - Headers map[string][]string `json:"headers,omitempty"` + Headers types.HttpHeaders `json:"headers,omitempty"` // MaxNumThreads The maximum number of threads to be used by the analysis. Using more // threads may decrease the time necessary to complete the analysis at the // cost of using more CPU. Note that the process may use additional threads diff --git a/typedapi/ml/putdataframeanalytics/response.go b/typedapi/ml/putdataframeanalytics/response.go old mode 100755 new mode 100644 index 32d8e05373..87d3ef2641 --- a/typedapi/ml/putdataframeanalytics/response.go +++ b/typedapi/ml/putdataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putdataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsResponse.ts#L31-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsResponse.ts#L31-L46 type Response struct { AllowLazyStart bool `json:"allow_lazy_start"` diff --git a/typedapi/ml/putfilter/put_filter.go b/typedapi/ml/putfilter/put_filter.go old mode 100755 new mode 100644 index 13d422083f..30c4b94c3f --- a/typedapi/ml/putfilter/put_filter.go +++ b/typedapi/ml/putfilter/put_filter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Instantiates a filter. package putfilter @@ -205,7 +205,6 @@ func (r PutFilter) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/putfilter/request.go b/typedapi/ml/putfilter/request.go old mode 100755 new mode 100644 index ca510fe1be..d13f7ee7d1 --- a/typedapi/ml/putfilter/request.go +++ b/typedapi/ml/putfilter/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putfilter @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putfilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_filter/MlPutFilterRequest.ts#L23-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_filter/MlPutFilterRequest.ts#L23-L50 type Request struct { // Description A description of the filter. 
diff --git a/typedapi/ml/putfilter/response.go b/typedapi/ml/putfilter/response.go old mode 100755 new mode 100644 index 3c331866ae..98acc44f0b --- a/typedapi/ml/putfilter/response.go +++ b/typedapi/ml/putfilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putfilter // Response holds the response body struct for the package putfilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_filter/MlPutFilterResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_filter/MlPutFilterResponse.ts#L22-L28 type Response struct { Description string `json:"description"` diff --git a/typedapi/ml/putjob/put_job.go b/typedapi/ml/putjob/put_job.go old mode 100755 new mode 100644 index 88bc2d1af4..055837f999 --- a/typedapi/ml/putjob/put_job.go +++ b/typedapi/ml/putjob/put_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Instantiates an anomaly detection job. package putjob @@ -205,7 +205,6 @@ func (r PutJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/putjob/request.go b/typedapi/ml/putjob/request.go old mode 100755 new mode 100644 index 6876be18b7..d499414623 --- a/typedapi/ml/putjob/request.go +++ b/typedapi/ml/putjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putjob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_job/MlPutJobRequest.ts#L30-L111 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_job/MlPutJobRequest.ts#L30-L111 type Request struct { // AllowLazyOpen Advanced configuration option. Specifies whether this job can open when there diff --git a/typedapi/ml/putjob/response.go b/typedapi/ml/putjob/response.go old mode 100755 new mode 100644 index 868a43f5c3..4bb7491ccc --- a/typedapi/ml/putjob/response.go +++ b/typedapi/ml/putjob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putjob @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_job/MlPutJobResponse.ts#L29-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_job/MlPutJobResponse.ts#L29-L52 type Response struct { AllowLazyOpen bool `json:"allow_lazy_open"` diff --git a/typedapi/ml/puttrainedmodel/put_trained_model.go b/typedapi/ml/puttrainedmodel/put_trained_model.go old mode 100755 new mode 100644 index 9014ce9344..d38a54162c --- a/typedapi/ml/puttrainedmodel/put_trained_model.go +++ b/typedapi/ml/puttrainedmodel/put_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates an inference trained model. package puttrainedmodel @@ -206,7 +206,6 @@ func (r PutTrainedModel) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/puttrainedmodel/request.go b/typedapi/ml/puttrainedmodel/request.go old mode 100755 new mode 100644 index 1f47709223..72b6232934 --- a/typedapi/ml/puttrainedmodel/request.go +++ b/typedapi/ml/puttrainedmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodel @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package puttrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/MlPutTrainedModelRequest.ts#L28-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/MlPutTrainedModelRequest.ts#L28-L94 type Request struct { // CompressedDefinition The compressed (GZipped and Base64 encoded) inference definition of the diff --git a/typedapi/ml/puttrainedmodel/response.go b/typedapi/ml/puttrainedmodel/response.go old mode 100755 new mode 100644 index 7fd5acd253..628ba4ab14 --- a/typedapi/ml/puttrainedmodel/response.go +++ b/typedapi/ml/puttrainedmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodel @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package puttrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/MlPutTrainedModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/MlPutTrainedModelResponse.ts#L22-L24 type Response struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` @@ -43,6 +43,8 @@ type Response struct { EstimatedHeapMemoryUsageBytes *int `json:"estimated_heap_memory_usage_bytes,omitempty"` // EstimatedOperations The estimated number of operations to use the trained model. EstimatedOperations *int `json:"estimated_operations,omitempty"` + // FullyDefined True if the full model definition is present. + FullyDefined *bool `json:"fully_defined,omitempty"` // InferenceConfig The default configuration for inference. This can be either a regression, // classification, or one of the many NLP focused configurations. It must match // the underlying definition.trained_model's target_type. diff --git a/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go b/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go old mode 100755 new mode 100644 index 96feaa1e81..ede328ba95 --- a/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go +++ b/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new model alias (or reassigns an existing one) to refer to the // trained model @@ -189,7 +189,6 @@ func (r PutTrainedModelAlias) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/puttrainedmodelalias/response.go b/typedapi/ml/puttrainedmodelalias/response.go old mode 100755 new mode 100644 index 39bf1b49a6..d75216e5a3 --- a/typedapi/ml/puttrainedmodelalias/response.go +++ b/typedapi/ml/puttrainedmodelalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
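The puttrainedmodel response above gains an optional `FullyDefined *bool`. A minimal sketch of reading it defensively; the response value is hand-built for illustration rather than taken from a live call:

```go
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/puttrainedmodel"
)

func main() {
	// Hand-built response standing in for the result of a real request.
	res := &puttrainedmodel.Response{}
	fullyDefined := true
	res.FullyDefined = &fullyDefined

	// The field is a pointer so "absent" and "false" stay distinguishable.
	if res.FullyDefined != nil && *res.FullyDefined {
		fmt.Println("full model definition is present")
	} else {
		fmt.Println("model definition is absent or incomplete")
	}
}
```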
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodelalias // Response holds the response body struct for the package puttrainedmodelalias // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model_alias/MlPutTrainedModelAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model_alias/MlPutTrainedModelAliasResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go b/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go old mode 100755 new mode 100644 index 4b6d2673a9..aefc23fd4d --- a/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates part of a trained model definition package puttrainedmodeldefinitionpart @@ -215,7 +215,6 @@ func (r PutTrainedModelDefinitionPart) Do(ctx context.Context) (*Response, error } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/request.go b/typedapi/ml/puttrainedmodeldefinitionpart/request.go old mode 100755 new mode 100644 index 0b978a3385..7963cc5de5 --- a/typedapi/ml/puttrainedmodeldefinitionpart/request.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodeldefinitionpart @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package puttrainedmodeldefinitionpart // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartRequest.ts#L24-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartRequest.ts#L24-L57 type Request struct { // Definition The definition part for the model. Must be a base64 encoded string. diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/response.go b/typedapi/ml/puttrainedmodeldefinitionpart/response.go old mode 100755 new mode 100644 index d878ebe54e..cc27369b51 --- a/typedapi/ml/puttrainedmodeldefinitionpart/response.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodeldefinitionpart // Response holds the response body struct for the package puttrainedmodeldefinitionpart // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go b/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go old mode 100755 new mode 100644 index 67e880c8da..d1f79b2056 --- a/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go +++ b/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a trained model vocabulary package puttrainedmodelvocabulary @@ -207,7 +207,6 @@ func (r PutTrainedModelVocabulary) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/puttrainedmodelvocabulary/request.go b/typedapi/ml/puttrainedmodelvocabulary/request.go old mode 100755 new mode 100644 index 304196f3ff..c6f8694976 --- a/typedapi/ml/puttrainedmodelvocabulary/request.go +++ b/typedapi/ml/puttrainedmodelvocabulary/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodelvocabulary @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package puttrainedmodelvocabulary // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyRequest.ts#L23-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyRequest.ts#L23-L51 type Request struct { // Merges The optional model merges if required by the tokenizer. diff --git a/typedapi/ml/puttrainedmodelvocabulary/response.go b/typedapi/ml/puttrainedmodelvocabulary/response.go old mode 100755 new mode 100644 index cfd2223278..d558c687da --- a/typedapi/ml/puttrainedmodelvocabulary/response.go +++ b/typedapi/ml/puttrainedmodelvocabulary/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttrainedmodelvocabulary // Response holds the response body struct for the package puttrainedmodelvocabulary // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/resetjob/reset_job.go b/typedapi/ml/resetjob/reset_job.go old mode 100755 new mode 100644 index 1bb8c5be34..0065dabb49 --- a/typedapi/ml/resetjob/reset_job.go +++ b/typedapi/ml/resetjob/reset_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Resets an existing anomaly detection job. package resetjob @@ -173,7 +173,6 @@ func (r ResetJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/resetjob/response.go b/typedapi/ml/resetjob/response.go old mode 100755 new mode 100644 index 6d56b49448..c08ba7ecb7 --- a/typedapi/ml/resetjob/response.go +++ b/typedapi/ml/resetjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resetjob // Response holds the response body struct for the package resetjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/reset_job/MlResetJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/reset_job/MlResetJobResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/revertmodelsnapshot/request.go b/typedapi/ml/revertmodelsnapshot/request.go old mode 100755 new mode 100644 index bdb677821d..8d70f00b52 --- a/typedapi/ml/revertmodelsnapshot/request.go +++ b/typedapi/ml/revertmodelsnapshot/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package revertmodelsnapshot @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package revertmodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/revert_model_snapshot/MlRevertModelSnapshotRequest.ts#L23-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/revert_model_snapshot/MlRevertModelSnapshotRequest.ts#L23-L69 type Request struct { // DeleteInterveningResults Refer to the description for the `delete_intervening_results` query diff --git a/typedapi/ml/revertmodelsnapshot/response.go b/typedapi/ml/revertmodelsnapshot/response.go old mode 100755 new mode 100644 index 07931d437c..62905e1033 --- a/typedapi/ml/revertmodelsnapshot/response.go +++ b/typedapi/ml/revertmodelsnapshot/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package revertmodelsnapshot @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package revertmodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/revert_model_snapshot/MlRevertModelSnapshotResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/revert_model_snapshot/MlRevertModelSnapshotResponse.ts#L22-L24 type Response struct { Model types.ModelSnapshot `json:"model"` diff --git a/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go b/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go old mode 100755 new mode 100644 index 48d44bf66b..5b9e096d04 --- a/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go +++ b/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Reverts to a specific snapshot. package revertmodelsnapshot @@ -218,7 +218,6 @@ func (r RevertModelSnapshot) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/setupgrademode/response.go b/typedapi/ml/setupgrademode/response.go old mode 100755 new mode 100644 index ab4917c7fa..80e1cc0043 --- a/typedapi/ml/setupgrademode/response.go +++ b/typedapi/ml/setupgrademode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package setupgrademode // Response holds the response body struct for the package setupgrademode // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/set_upgrade_mode/MlSetUpgradeModeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/set_upgrade_mode/MlSetUpgradeModeResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/setupgrademode/set_upgrade_mode.go b/typedapi/ml/setupgrademode/set_upgrade_mode.go old mode 100755 new mode 100644 index 4a425846da..b23f1bd599 --- a/typedapi/ml/setupgrademode/set_upgrade_mode.go +++ b/typedapi/ml/setupgrademode/set_upgrade_mode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Sets a cluster wide upgrade_mode setting that prepares machine learning // indices for an upgrade. @@ -162,7 +162,6 @@ func (r SetUpgradeMode) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/startdatafeed/request.go b/typedapi/ml/startdatafeed/request.go old mode 100755 new mode 100644 index 5ed8d9681d..1ba4021c57 --- a/typedapi/ml/startdatafeed/request.go +++ b/typedapi/ml/startdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package startdatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package startdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/start_datafeed/MlStartDatafeedRequest.ts#L24-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/start_datafeed/MlStartDatafeedRequest.ts#L24-L91 type Request struct { // End Refer to the description for the `end` query parameter. diff --git a/typedapi/ml/startdatafeed/response.go b/typedapi/ml/startdatafeed/response.go old mode 100755 new mode 100644 index 4f4ce66a89..206d28a036 --- a/typedapi/ml/startdatafeed/response.go +++ b/typedapi/ml/startdatafeed/response.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package startdatafeed +import ( + "bytes" + "encoding/json" + "errors" + "io" + "strconv" +) + // Response holds the response body struct for the package startdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/start_datafeed/MlStartDatafeedResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/start_datafeed/MlStartDatafeedResponse.ts#L22-L34 type Response struct { @@ -40,3 +48,52 @@ func NewResponse() *Response { r := &Response{} return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "node": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Node = append(s.Node, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Node); err != nil { + return err + } + } + + case "started": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Started = value + case bool: + s.Started = v + } + + } + } + return nil +} diff --git a/typedapi/ml/startdatafeed/start_datafeed.go b/typedapi/ml/startdatafeed/start_datafeed.go old mode 100755 new mode 100644 index b21af6fea2..2ec8a009bb --- a/typedapi/ml/startdatafeed/start_datafeed.go +++ b/typedapi/ml/startdatafeed/start_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Starts one or more datafeeds. package startdatafeed @@ -207,7 +207,6 @@ func (r StartDatafeed) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/startdataframeanalytics/response.go b/typedapi/ml/startdataframeanalytics/response.go old mode 100755 new mode 100644 index d2f775aa6e..0de9ad2f99 --- a/typedapi/ml/startdataframeanalytics/response.go +++ b/typedapi/ml/startdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
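// Editor's sketch, standalone and not the generated decoder: it mirrors the technique of the
// new startdatafeed UnmarshalJSON added above. The server may return "node" as a single
// string or as an array of strings, and "started" as a bool or a quoted bool, so the decoder
// inspects the raw JSON before committing to a Go type. startResult is a stand-in type for
// this example only.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

type startResult struct {
	Node    []string
	Started bool
}

func (s *startResult) UnmarshalJSON(data []byte) error {
	var fields map[string]json.RawMessage
	if err := json.Unmarshal(data, &fields); err != nil {
		return err
	}
	if raw, ok := fields["node"]; ok {
		if bytes.HasPrefix(raw, []byte("[")) {
			// Already an array: decode directly into the slice.
			if err := json.Unmarshal(raw, &s.Node); err != nil {
				return err
			}
		} else {
			// Single string: decode and append, matching the generated behaviour.
			var one string
			if err := json.Unmarshal(raw, &one); err != nil {
				return err
			}
			s.Node = append(s.Node, one)
		}
	}
	if raw, ok := fields["started"]; ok {
		var v interface{}
		if err := json.Unmarshal(raw, &v); err != nil {
			return err
		}
		switch t := v.(type) {
		case bool:
			s.Started = t
		case string:
			b, err := strconv.ParseBool(t)
			if err != nil {
				return err
			}
			s.Started = b
		}
	}
	return nil
}

func main() {
	for _, raw := range []string{
		`{"node":"node-1","started":true}`,
		`{"node":["node-1","node-2"],"started":"true"}`,
	} {
		var r startResult
		if err := json.Unmarshal([]byte(raw), &r); err != nil {
			panic(err)
		}
		fmt.Printf("%+v\n", r)
	}
}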
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package startdataframeanalytics // Response holds the response body struct for the package startdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/start_data_frame_analytics/MlStartDataFrameAnalyticsResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/start_data_frame_analytics/MlStartDataFrameAnalyticsResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go b/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go old mode 100755 new mode 100644 index 80776464f2..b075de71c7 --- a/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go +++ b/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Starts a data frame analytics job. package startdataframeanalytics @@ -180,7 +180,6 @@ func (r StartDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/starttrainedmodeldeployment/response.go b/typedapi/ml/starttrainedmodeldeployment/response.go old mode 100755 new mode 100644 index 1c01d76309..7cdcc75fde --- a/typedapi/ml/starttrainedmodeldeployment/response.go +++ b/typedapi/ml/starttrainedmodeldeployment/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package starttrainedmodeldeployment @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package starttrainedmodeldeployment // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/start_trained_model_deployment/MlStartTrainedModelDeploymentResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/start_trained_model_deployment/MlStartTrainedModelDeploymentResponse.ts#L22-L26 type Response struct { Assignment types.TrainedModelAssignment `json:"assignment"` diff --git a/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go b/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go old mode 100755 new mode 100644 index 19c924d954..fc027fc3e5 --- a/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go +++ b/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Start a trained model deployment. package starttrainedmodeldeployment @@ -184,7 +184,6 @@ func (r StartTrainedModelDeployment) Do(ctx context.Context) (*Response, error) } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/stopdatafeed/request.go b/typedapi/ml/stopdatafeed/request.go old mode 100755 new mode 100644 index a8a1e672d0..fb37b8635e --- a/typedapi/ml/stopdatafeed/request.go +++ b/typedapi/ml/stopdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stopdatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package stopdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/stop_datafeed/MlStopDatafeedRequest.ts#L24-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/stop_datafeed/MlStopDatafeedRequest.ts#L24-L78 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. diff --git a/typedapi/ml/stopdatafeed/response.go b/typedapi/ml/stopdatafeed/response.go old mode 100755 new mode 100644 index a673ad8df8..fc3e34e293 --- a/typedapi/ml/stopdatafeed/response.go +++ b/typedapi/ml/stopdatafeed/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stopdatafeed // Response holds the response body struct for the package stopdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/stop_datafeed/MlStopDatafeedResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/stop_datafeed/MlStopDatafeedResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` diff --git a/typedapi/ml/stopdatafeed/stop_datafeed.go b/typedapi/ml/stopdatafeed/stop_datafeed.go old mode 100755 new mode 100644 index 7be97b6b54..14b4dbd34c --- a/typedapi/ml/stopdatafeed/stop_datafeed.go +++ b/typedapi/ml/stopdatafeed/stop_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Stops one or more datafeeds. 
package stopdatafeed @@ -208,7 +208,6 @@ func (r StopDatafeed) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/stopdataframeanalytics/response.go b/typedapi/ml/stopdataframeanalytics/response.go old mode 100755 new mode 100644 index 527e7458fa..132b7b10dd --- a/typedapi/ml/stopdataframeanalytics/response.go +++ b/typedapi/ml/stopdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stopdataframeanalytics // Response holds the response body struct for the package stopdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/stop_data_frame_analytics/MlStopDataFrameAnalyticsResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/stop_data_frame_analytics/MlStopDataFrameAnalyticsResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` diff --git a/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go b/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go old mode 100755 new mode 100644 index 7ff25d6214..9995b91a83 --- a/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go +++ b/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Stops one or more data frame analytics jobs. package stopdataframeanalytics @@ -181,7 +181,6 @@ func (r StopDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/stoptrainedmodeldeployment/response.go b/typedapi/ml/stoptrainedmodeldeployment/response.go old mode 100755 new mode 100644 index 5b5c896db2..7e32875366 --- a/typedapi/ml/stoptrainedmodeldeployment/response.go +++ b/typedapi/ml/stoptrainedmodeldeployment/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stoptrainedmodeldeployment // Response holds the response body struct for the package stoptrainedmodeldeployment // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/stop_trained_model_deployment/MlStopTrainedModelDeploymentResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/stop_trained_model_deployment/MlStopTrainedModelDeploymentResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` diff --git a/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go b/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go old mode 100755 new mode 100644 index b42b33e061..dd3f75096b --- a/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go +++ b/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Stop a trained model deployment. package stoptrainedmodeldeployment @@ -181,7 +181,6 @@ func (r StopTrainedModelDeployment) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/updatedatafeed/request.go b/typedapi/ml/updatedatafeed/request.go old mode 100755 new mode 100644 index 76c1d1e713..5aeef9a344 --- a/typedapi/ml/updatedatafeed/request.go +++ b/typedapi/ml/updatedatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatedatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package updatedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_datafeed/MlUpdateDatafeedRequest.ts#L31-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_datafeed/MlUpdateDatafeedRequest.ts#L31-L162 type Request struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations @@ -101,7 +101,7 @@ type Request struct { // when there are multiple jobs running on the same node. QueryDelay types.Duration `json:"query_delay,omitempty"` // RuntimeMappings Specifies runtime fields for the datafeed search. - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // ScriptFields Specifies scripts that evaluate custom expressions and returns script fields // to the datafeed. 
// The detector configuration objects in a job can contain functions that use diff --git a/typedapi/ml/updatedatafeed/response.go b/typedapi/ml/updatedatafeed/response.go old mode 100755 new mode 100644 index 13f97ade99..1046bbf9cd --- a/typedapi/ml/updatedatafeed/response.go +++ b/typedapi/ml/updatedatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatedatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_datafeed/MlUpdateDatafeedResponse.ts#L31-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_datafeed/MlUpdateDatafeedResponse.ts#L31-L49 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` @@ -41,7 +41,7 @@ type Response struct { MaxEmptySearches *int `json:"max_empty_searches,omitempty"` Query types.Query `json:"query"` QueryDelay types.Duration `json:"query_delay"` - RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` ScriptFields map[string]types.ScriptField `json:"script_fields,omitempty"` ScrollSize int `json:"scroll_size"` } diff --git a/typedapi/ml/updatedatafeed/update_datafeed.go b/typedapi/ml/updatedatafeed/update_datafeed.go old mode 100755 new mode 100644 index 9b2cbedd4a..79b0cd7e38 --- a/typedapi/ml/updatedatafeed/update_datafeed.go +++ b/typedapi/ml/updatedatafeed/update_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates certain properties of a datafeed. package updatedatafeed @@ -208,7 +208,6 @@ func (r UpdateDatafeed) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/updatedataframeanalytics/request.go b/typedapi/ml/updatedataframeanalytics/request.go old mode 100755 new mode 100644 index 96e42a31ac..f4dd68c311 --- a/typedapi/ml/updatedataframeanalytics/request.go +++ b/typedapi/ml/updatedataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
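// Editor's sketch with stand-in types, not the real typedapi/types package: the updatedatafeed
// request and response above switch RuntimeMappings from map[string]types.RuntimeField to
// types.RuntimeFields. Assuming RuntimeFields is a named type whose underlying type is that same
// map (which the one-for-one replacement suggests), the same literal body still builds the field,
// and an existing map[string]RuntimeField value remains assignable without a conversion; only the
// type name written in struct definitions changes.
package main

import "fmt"

// Stand-ins for this example only.
type runtimeField struct {
	Type string `json:"type"`
}

type runtimeFields map[string]runtimeField

type datafeedUpdate struct {
	RuntimeMappings runtimeFields `json:"runtime_mappings,omitempty"`
}

func main() {
	upd := datafeedUpdate{
		// The same map literal that populated the old map type works with the named type.
		RuntimeMappings: runtimeFields{
			"day_of_week": {Type: "keyword"},
		},
	}
	fmt.Printf("%+v\n", upd)
}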
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatedataframeanalytics @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsRequest.ts#L24-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsRequest.ts#L24-L72 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine diff --git a/typedapi/ml/updatedataframeanalytics/response.go b/typedapi/ml/updatedataframeanalytics/response.go old mode 100755 new mode 100644 index cd3e36643f..a121800a5d --- a/typedapi/ml/updatedataframeanalytics/response.go +++ b/typedapi/ml/updatedataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatedataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsResponse.ts#L30-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsResponse.ts#L30-L45 type Response struct { AllowLazyStart bool `json:"allow_lazy_start"` diff --git a/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go b/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go old mode 100755 new mode 100644 index f4cea74017..c7d55498ab --- a/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go +++ b/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates certain properties of a data frame analytics job. package updatedataframeanalytics @@ -209,7 +209,6 @@ func (r UpdateDataFrameAnalytics) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/updatefilter/request.go b/typedapi/ml/updatefilter/request.go old mode 100755 new mode 100644 index 937401e350..39edc8ae9d --- a/typedapi/ml/updatefilter/request.go +++ b/typedapi/ml/updatefilter/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatefilter @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatefilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_filter/MlUpdateFilterRequest.ts#L23-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_filter/MlUpdateFilterRequest.ts#L23-L51 type Request struct { // AddItems The items to add to the filter. diff --git a/typedapi/ml/updatefilter/response.go b/typedapi/ml/updatefilter/response.go old mode 100755 new mode 100644 index 5948693273..018d4b2bb5 --- a/typedapi/ml/updatefilter/response.go +++ b/typedapi/ml/updatefilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatefilter // Response holds the response body struct for the package updatefilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_filter/MlUpdateFilterResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_filter/MlUpdateFilterResponse.ts#L22-L28 type Response struct { Description string `json:"description"` diff --git a/typedapi/ml/updatefilter/update_filter.go b/typedapi/ml/updatefilter/update_filter.go old mode 100755 new mode 100644 index aeed4a2579..99ab5e9b8e --- a/typedapi/ml/updatefilter/update_filter.go +++ b/typedapi/ml/updatefilter/update_filter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the description of a filter, adds items, or removes items. package updatefilter @@ -207,7 +207,6 @@ func (r UpdateFilter) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/updatejob/request.go b/typedapi/ml/updatejob/request.go old mode 100755 new mode 100644 index c404a9b9d5..e4fafaaecd --- a/typedapi/ml/updatejob/request.go +++ b/typedapi/ml/updatejob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatejob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package updatejob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_job/MlUpdateJobRequest.ts#L33-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_job/MlUpdateJobRequest.ts#L33-L138 type Request struct { // AllowLazyOpen Advanced configuration option. Specifies whether this job can open when diff --git a/typedapi/ml/updatejob/response.go b/typedapi/ml/updatejob/response.go old mode 100755 new mode 100644 index 3988f43f04..15b2aeb121 --- a/typedapi/ml/updatejob/response.go +++ b/typedapi/ml/updatejob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatejob @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatejob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_job/MlUpdateJobResponse.ts#L29-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_job/MlUpdateJobResponse.ts#L29-L53 type Response struct { AllowLazyOpen bool `json:"allow_lazy_open"` diff --git a/typedapi/ml/updatejob/update_job.go b/typedapi/ml/updatejob/update_job.go old mode 100755 new mode 100644 index 0d1aa04bf0..379079628b --- a/typedapi/ml/updatejob/update_job.go +++ b/typedapi/ml/updatejob/update_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates certain properties of an anomaly detection job. package updatejob @@ -207,7 +207,6 @@ func (r UpdateJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/updatemodelsnapshot/request.go b/typedapi/ml/updatemodelsnapshot/request.go old mode 100755 new mode 100644 index f4bf60349e..c748f5c483 --- a/typedapi/ml/updatemodelsnapshot/request.go +++ b/typedapi/ml/updatemodelsnapshot/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatemodelsnapshot @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_model_snapshot/MlUpdateModelSnapshotRequest.ts#L23-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_model_snapshot/MlUpdateModelSnapshotRequest.ts#L23-L54 type Request struct { // Description A description of the model snapshot. diff --git a/typedapi/ml/updatemodelsnapshot/response.go b/typedapi/ml/updatemodelsnapshot/response.go old mode 100755 new mode 100644 index f742df88b6..8c2144b99a --- a/typedapi/ml/updatemodelsnapshot/response.go +++ b/typedapi/ml/updatemodelsnapshot/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatemodelsnapshot @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/update_model_snapshot/MlUpdateModelSnapshotResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/update_model_snapshot/MlUpdateModelSnapshotResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go b/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go old mode 100755 new mode 100644 index b8ce48c990..56e1ba7f62 --- a/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go +++ b/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates certain properties of a snapshot. package updatemodelsnapshot @@ -217,7 +217,6 @@ func (r UpdateModelSnapshot) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/upgradejobsnapshot/response.go b/typedapi/ml/upgradejobsnapshot/response.go old mode 100755 new mode 100644 index 9315762109..3ff9f7a8fe --- a/typedapi/ml/upgradejobsnapshot/response.go +++ b/typedapi/ml/upgradejobsnapshot/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package upgradejobsnapshot // Response holds the response body struct for the package upgradejobsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/upgrade_job_snapshot/MlUpgradeJobSnapshotResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/upgrade_job_snapshot/MlUpgradeJobSnapshotResponse.ts#L22-L29 type Response struct { diff --git a/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go b/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go old mode 100755 new mode 100644 index 61c33429fd..1c85704378 --- a/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go +++ b/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Upgrades a given job snapshot to the current major version. package upgradejobsnapshot @@ -183,7 +183,6 @@ func (r UpgradeJobSnapshot) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/validate/request.go b/typedapi/ml/validate/request.go old mode 100755 new mode 100644 index 9363402b64..141d5f7278 --- a/typedapi/ml/validate/request.go +++ b/typedapi/ml/validate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package validate @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package validate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/validate/MlValidateJobRequest.ts#L27-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/validate/MlValidateJobRequest.ts#L27-L45 type Request struct { AnalysisConfig *types.AnalysisConfig `json:"analysis_config,omitempty"` AnalysisLimits *types.AnalysisLimits `json:"analysis_limits,omitempty"` diff --git a/typedapi/ml/validate/response.go b/typedapi/ml/validate/response.go old mode 100755 new mode 100644 index 3be242f153..bce16fa0d1 --- a/typedapi/ml/validate/response.go +++ b/typedapi/ml/validate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package validate // Response holds the response body struct for the package validate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/validate/MlValidateJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/validate/MlValidateJobResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/validate/validate.go b/typedapi/ml/validate/validate.go old mode 100755 new mode 100644 index d0fddc249b..8a1aa92c7b --- a/typedapi/ml/validate/validate.go +++ b/typedapi/ml/validate/validate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Validates an anomaly detection job. package validate @@ -196,7 +196,6 @@ func (r Validate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ml/validatedetector/response.go b/typedapi/ml/validatedetector/response.go old mode 100755 new mode 100644 index 8abaaea6b9..579fef0e90 --- a/typedapi/ml/validatedetector/response.go +++ b/typedapi/ml/validatedetector/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package validatedetector // Response holds the response body struct for the package validatedetector // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/validate_detector/MlValidateDetectorResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/validate_detector/MlValidateDetectorResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/ml/validatedetector/validate_detector.go b/typedapi/ml/validatedetector/validate_detector.go old mode 100755 new mode 100644 index 9d89a228ca..b737c21702 --- a/typedapi/ml/validatedetector/validate_detector.go +++ b/typedapi/ml/validatedetector/validate_detector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Validates an anomaly detection detector. 
package validatedetector @@ -198,7 +198,6 @@ func (r ValidateDetector) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go b/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go old mode 100755 new mode 100644 index 2d8b5894c3..87aef900a3 --- a/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go +++ b/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes the archived repositories metering information present in the // cluster. @@ -180,7 +180,6 @@ func (r ClearRepositoriesMeteringArchive) Do(ctx context.Context) (*Response, er } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/clearrepositoriesmeteringarchive/response.go b/typedapi/nodes/clearrepositoriesmeteringarchive/response.go old mode 100755 new mode 100644 index 499b44e61b..084b82b9db --- a/typedapi/nodes/clearrepositoriesmeteringarchive/response.go +++ b/typedapi/nodes/clearrepositoriesmeteringarchive/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearrepositoriesmeteringarchive @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearrepositoriesmeteringarchive // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/clear_repositories_metering_archive/ClearRepositoriesMeteringArchiveResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/clear_repositories_metering_archive/ClearRepositoriesMeteringArchiveResponse.ts#L36-L38 type Response struct { diff --git a/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go b/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go old mode 100755 new mode 100644 index c46d3f6fd8..5c919b35bc --- a/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go +++ b/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns cluster repositories metering information. 
package getrepositoriesmeteringinfo @@ -170,7 +170,6 @@ func (r GetRepositoriesMeteringInfo) Do(ctx context.Context) (*Response, error) } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/getrepositoriesmeteringinfo/response.go b/typedapi/nodes/getrepositoriesmeteringinfo/response.go old mode 100755 new mode 100644 index 8db732b89b..546ff928ea --- a/typedapi/nodes/getrepositoriesmeteringinfo/response.go +++ b/typedapi/nodes/getrepositoriesmeteringinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrepositoriesmeteringinfo @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrepositoriesmeteringinfo // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/get_repositories_metering_info/GetRepositoriesMeteringInfoResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/get_repositories_metering_info/GetRepositoriesMeteringInfoResponse.ts#L36-L38 type Response struct { diff --git a/typedapi/nodes/hotthreads/hot_threads.go b/typedapi/nodes/hotthreads/hot_threads.go old mode 100755 new mode 100644 index 12d249fd1f..bd80cbd011 --- a/typedapi/nodes/hotthreads/hot_threads.go +++ b/typedapi/nodes/hotthreads/hot_threads.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about hot threads on each node in the cluster. package hotthreads @@ -178,7 +178,6 @@ func (r HotThreads) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/hotthreads/response.go b/typedapi/nodes/hotthreads/response.go old mode 100755 new mode 100644 index cc6aca83da..07b6dcae72 --- a/typedapi/nodes/hotthreads/response.go +++ b/typedapi/nodes/hotthreads/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package hotthreads @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hotthreads // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/hot_threads/NodesHotThreadsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/hot_threads/NodesHotThreadsResponse.ts#L22-L24 type Response struct { HotThreads []types.HotThread `json:"hot_threads"` diff --git a/typedapi/nodes/info/info.go b/typedapi/nodes/info/info.go old mode 100755 new mode 100644 index 29436a7e01..3f7658c2da --- a/typedapi/nodes/info/info.go +++ b/typedapi/nodes/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about nodes in the cluster. package info @@ -194,7 +194,6 @@ func (r Info) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/info/response.go b/typedapi/nodes/info/response.go old mode 100755 new mode 100644 index 192b9cf922..8934f3a2de --- a/typedapi/nodes/info/response.go +++ b/typedapi/nodes/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/NodesInfoResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/NodesInfoResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go b/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go old mode 100755 new mode 100644 index 4e6ee4b427..f7982c6b60 --- a/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go +++ b/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Reloads secure settings. 
package reloadsecuresettings @@ -210,7 +210,6 @@ func (r ReloadSecureSettings) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/reloadsecuresettings/request.go b/typedapi/nodes/reloadsecuresettings/request.go old mode 100755 new mode 100644 index ffdd3f1ce9..4bfb1fb45f --- a/typedapi/nodes/reloadsecuresettings/request.go +++ b/typedapi/nodes/reloadsecuresettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reloadsecuresettings @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package reloadsecuresettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/reload_secure_settings/ReloadSecureSettingsRequest.ts#L24-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/reload_secure_settings/ReloadSecureSettingsRequest.ts#L24-L39 type Request struct { SecureSettingsPassword *string `json:"secure_settings_password,omitempty"` } diff --git a/typedapi/nodes/reloadsecuresettings/response.go b/typedapi/nodes/reloadsecuresettings/response.go old mode 100755 new mode 100644 index 12fbe1fe73..6a9178897d --- a/typedapi/nodes/reloadsecuresettings/response.go +++ b/typedapi/nodes/reloadsecuresettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package reloadsecuresettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reloadsecuresettings // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/reload_secure_settings/ReloadSecureSettingsResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/reload_secure_settings/ReloadSecureSettingsResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/nodes/stats/response.go b/typedapi/nodes/stats/response.go old mode 100755 new mode 100644 index e6789c08c6..8aabcb2bc7 --- a/typedapi/nodes/stats/response.go +++ b/typedapi/nodes/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/stats/NodesStatsResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/stats/NodesStatsResponse.ts#L30-L32 type Response struct { ClusterName *string `json:"cluster_name,omitempty"` diff --git a/typedapi/nodes/stats/stats.go b/typedapi/nodes/stats/stats.go old mode 100755 new mode 100644 index a00262ea04..f0ba9173c1 --- a/typedapi/nodes/stats/stats.go +++ b/typedapi/nodes/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns statistical information about nodes in the cluster. package stats @@ -236,7 +236,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/nodes/usage/response.go b/typedapi/nodes/usage/response.go old mode 100755 new mode 100644 index ae3739ec29..ecb207e84b --- a/typedapi/nodes/usage/response.go +++ b/typedapi/nodes/usage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package usage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package usage // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/usage/NodesUsageResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/usage/NodesUsageResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/nodes/usage/usage.go b/typedapi/nodes/usage/usage.go old mode 100755 new mode 100644 index c325325cee..8ab4bead1a --- a/typedapi/nodes/usage/usage.go +++ b/typedapi/nodes/usage/usage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns low-level information about REST actions usage on nodes. 
package usage @@ -201,7 +201,6 @@ func (r Usage) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/deletejob/delete_job.go b/typedapi/rollup/deletejob/delete_job.go old mode 100755 new mode 100644 index beecce062e..e8dae0fae4 --- a/typedapi/rollup/deletejob/delete_job.go +++ b/typedapi/rollup/deletejob/delete_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing rollup job. package deletejob @@ -170,7 +170,6 @@ func (r DeleteJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/deletejob/response.go b/typedapi/rollup/deletejob/response.go old mode 100755 new mode 100644 index 6654cbf1c6..511c49e22e --- a/typedapi/rollup/deletejob/response.go +++ b/typedapi/rollup/deletejob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletejob @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deletejob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/delete_job/DeleteRollupJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/delete_job/DeleteRollupJobResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/rollup/getjobs/get_jobs.go b/typedapi/rollup/getjobs/get_jobs.go old mode 100755 new mode 100644 index 9c75b0b7f3..d1fbd8880a --- a/typedapi/rollup/getjobs/get_jobs.go +++ b/typedapi/rollup/getjobs/get_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves the configuration, stats, and status of rollup jobs. package getjobs @@ -112,7 +112,7 @@ func (r *GetJobs) HttpRequest(ctx context.Context) (*http.Request, error) { path.WriteString("_rollup") path.WriteString("/") path.WriteString("job") - path.WriteString("/") + method = http.MethodGet } @@ -175,7 +175,6 @@ func (r GetJobs) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/getjobs/response.go b/typedapi/rollup/getjobs/response.go old mode 100755 new mode 100644 index 83622013c4..f66f1dac8d --- a/typedapi/rollup/getjobs/response.go +++ b/typedapi/rollup/getjobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getjobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobs // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_jobs/GetRollupJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_jobs/GetRollupJobResponse.ts#L22-L24 type Response struct { Jobs []types.RollupJob `json:"jobs"` diff --git a/typedapi/rollup/getrollupcaps/get_rollup_caps.go b/typedapi/rollup/getrollupcaps/get_rollup_caps.go old mode 100755 new mode 100644 index 04d572099a..33a39bff37 --- a/typedapi/rollup/getrollupcaps/get_rollup_caps.go +++ b/typedapi/rollup/getrollupcaps/get_rollup_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the capabilities of any rollup jobs that have been configured for a // specific index or index pattern. @@ -114,7 +114,7 @@ func (r *GetRollupCaps) HttpRequest(ctx context.Context) (*http.Request, error) path.WriteString("_rollup") path.WriteString("/") path.WriteString("data") - path.WriteString("/") + method = http.MethodGet } @@ -177,7 +177,6 @@ func (r GetRollupCaps) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/getrollupcaps/response.go b/typedapi/rollup/getrollupcaps/response.go old mode 100755 new mode 100644 index c504073dde..e7fa27c813 --- a/typedapi/rollup/getrollupcaps/response.go +++ b/typedapi/rollup/getrollupcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrollupcaps @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrollupcaps // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_caps/GetRollupCapabilitiesResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_caps/GetRollupCapabilitiesResponse.ts#L24-L26 type Response map[string]types.RollupCapabilities diff --git a/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go b/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go old mode 100755 new mode 100644 index d05530a844..a0a0b8b177 --- a/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go +++ b/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
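Note on the two rollup path hunks above: when no job id or index pattern is supplied, the generated builder no longer appends a trailing slash, so the collection endpoints are requested as /_rollup/job and /_rollup/data rather than their slash-terminated variants. A standalone sketch of the resulting path logic (the helper name is illustrative, not part of the generated code):

```go
package main

import (
	"fmt"
	"strings"
)

// buildRollupJobPath mirrors the generated builder: the id segment, and its
// leading slash, are only written when an id is actually set.
func buildRollupJobPath(id string) string {
	var path strings.Builder
	path.WriteString("/_rollup/job")
	if id != "" {
		path.WriteString("/")
		path.WriteString(id)
	}
	return path.String()
}

func main() {
	fmt.Println(buildRollupJobPath(""))       // /_rollup/job
	fmt.Println(buildRollupJobPath("my-job")) // /_rollup/job/my-job
}
```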
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the rollup capabilities of all jobs inside of a rollup index (e.g. // the index where rollup data is stored). @@ -172,7 +172,6 @@ func (r GetRollupIndexCaps) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/getrollupindexcaps/response.go b/typedapi/rollup/getrollupindexcaps/response.go old mode 100755 new mode 100644 index f5cc2de0ba..2a8bd8c992 --- a/typedapi/rollup/getrollupindexcaps/response.go +++ b/typedapi/rollup/getrollupindexcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrollupindexcaps @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrollupindexcaps // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_index_caps/GetRollupIndexCapabilitiesResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_index_caps/GetRollupIndexCapabilitiesResponse.ts#L24-L26 type Response map[string]types.IndexCapabilities diff --git a/typedapi/rollup/putjob/put_job.go b/typedapi/rollup/putjob/put_job.go old mode 100755 new mode 100644 index 63c0e7c198..3156484d68 --- a/typedapi/rollup/putjob/put_job.go +++ b/typedapi/rollup/putjob/put_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a rollup job. package putjob @@ -205,7 +205,6 @@ func (r PutJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/putjob/request.go b/typedapi/rollup/putjob/request.go old mode 100755 new mode 100644 index f2899594c4..d32710696f --- a/typedapi/rollup/putjob/request.go +++ b/typedapi/rollup/putjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putjob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/put_job/CreateRollupJobRequest.ts#L27-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/put_job/CreateRollupJobRequest.ts#L27-L89 type Request struct { // Cron A cron string which defines the intervals when the rollup job should be @@ -52,8 +52,8 @@ type Request struct { // aggregations might be used. Rollups provide // enough flexibility that you simply need to determine which fields are needed, // not in what order they are needed. - Groups types.Groupings `json:"groups"` - Headers map[string][]string `json:"headers,omitempty"` + Groups types.Groupings `json:"groups"` + Headers types.HttpHeaders `json:"headers,omitempty"` // IndexPattern The index or index pattern to roll up. Supports wildcard-style patterns // (`logstash-*`). The job attempts to // rollup the entire index or index-pattern. diff --git a/typedapi/rollup/putjob/response.go b/typedapi/rollup/putjob/response.go old mode 100755 new mode 100644 index 299d4c8bec..777c7b113c --- a/typedapi/rollup/putjob/response.go +++ b/typedapi/rollup/putjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putjob // Response holds the response body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/put_job/CreateRollupJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/put_job/CreateRollupJobResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/rollup/rollupsearch/request.go b/typedapi/rollup/rollupsearch/request.go old mode 100755 new mode 100644 index 7b68be9ff8..5568818622 --- a/typedapi/rollup/rollupsearch/request.go +++ b/typedapi/rollup/rollupsearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
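The only field-level change in the putjob request above is Headers, which moves from a raw map[string][]string to the shared types.HttpHeaders type. Assuming HttpHeaders is a map-based alias in the types package (its definition is not part of this diff), existing composite literals keep working:

```go
package main

import "github.com/elastic/go-elasticsearch/v8/typedapi/types"

func main() {
	// Assumption: types.HttpHeaders behaves like map[string][]string;
	// that definition lives outside this diff.
	headers := types.HttpHeaders{"X-Opaque-Id": {"rollup-admin"}}
	_ = headers
}
```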
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rollupsearch @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rollupsearch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/rollup_search/RollupSearchRequest.ts#L27-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/rollup_search/RollupSearchRequest.ts#L27-L47 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Query *types.Query `json:"query,omitempty"` diff --git a/typedapi/rollup/rollupsearch/response.go b/typedapi/rollup/rollupsearch/response.go old mode 100755 new mode 100644 index aeae1cbdf7..98db77648e --- a/typedapi/rollup/rollupsearch/response.go +++ b/typedapi/rollup/rollupsearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package rollupsearch @@ -25,6 +25,7 @@ import ( "encoding/json" "errors" "io" + "strconv" "strings" "github.com/elastic/go-elasticsearch/v8/typedapi/types" @@ -32,7 +33,7 @@ import ( // Response holds the response body struct for the package rollupsearch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/rollup_search/RollupSearchResponse.ts#L27-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/rollup_search/RollupSearchResponse.ts#L27-L36 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` @@ -66,6 +67,10 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -78,415 +83,494 @@ func (s *Response) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } switch elems[0] { + case "cardinality": o := types.NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := types.NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := types.NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := types.NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := types.NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } 
s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := types.NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := types.NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := types.NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := types.NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := types.NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := types.NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := types.NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := types.NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := types.NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := types.NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := types.NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := types.NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := types.NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := types.NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := types.NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := types.NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := types.NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := types.NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := types.NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := 
types.NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := types.NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := types.NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := types.NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := types.NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := types.NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := types.NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "srareterms": o := types.NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := types.NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := types.NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := types.NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := types.NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := types.NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := types.NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := types.NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := types.NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := types.NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := types.NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := types.NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := types.NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err 
} s.Aggregations[elems[1]] = o + case "geotile_grid": o := types.NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohex_grid": o := types.NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := types.NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := types.NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := types.NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := types.NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := types.NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := types.NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := types.NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := types.NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := types.NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := types.NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := types.NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := types.NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := types.NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := types.NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := types.NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := types.NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := types.NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := types.NewTopMetricsAggregate() - if err := dec.Decode(o); err != 
nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := types.NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := types.NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := types.NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := types.NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := types.NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -518,18 +602,46 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/rollup/rollupsearch/rollup_search.go b/typedapi/rollup/rollupsearch/rollup_search.go old mode 100755 new mode 100644 index 2a2863a0e4..9515940a0a --- a/typedapi/rollup/rollupsearch/rollup_search.go +++ b/typedapi/rollup/rollupsearch/rollup_search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Enables searching rolled-up data using the standard query DSL. package rollupsearch @@ -206,7 +206,6 @@ func (r RollupSearch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/startjob/response.go b/typedapi/rollup/startjob/response.go old mode 100755 new mode 100644 index 074deda924..e5af667ca8 --- a/typedapi/rollup/startjob/response.go +++ b/typedapi/rollup/startjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
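Most of the rollupsearch UnmarshalJSON hunk only swaps dec.Decode(o) for dec.Decode(&o), which is behaviour-preserving since encoding/json follows (and allocates) pointers. The substantive change is at the bottom: took, timed_out and terminated_early are now decoded leniently, accepting either a JSON native or its string form. A self-contained sketch of that pattern, using a hypothetical struct:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// result decodes "took" leniently, whether it arrives as a JSON number or a string.
type result struct {
	Took int64
}

func (r *result) UnmarshalJSON(data []byte) error {
	var raw map[string]interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["took"].(type) {
	case string:
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return err
		}
		r.Took = n
	case float64: // encoding/json decodes JSON numbers into float64
		r.Took = int64(v)
	}
	return nil
}

func main() {
	var a, b result
	_ = json.Unmarshal([]byte(`{"took": 12}`), &a)
	_ = json.Unmarshal([]byte(`{"took": "12"}`), &b)
	fmt.Println(a.Took, b.Took) // 12 12
}
```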
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package startjob // Response holds the response body struct for the package startjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/start_job/StartRollupJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/start_job/StartRollupJobResponse.ts#L20-L22 type Response struct { Started bool `json:"started"` diff --git a/typedapi/rollup/startjob/start_job.go b/typedapi/rollup/startjob/start_job.go old mode 100755 new mode 100644 index cfcba10b78..d907cc4882 --- a/typedapi/rollup/startjob/start_job.go +++ b/typedapi/rollup/startjob/start_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Starts an existing, stopped rollup job. package startjob @@ -172,7 +172,6 @@ func (r StartJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/rollup/stopjob/response.go b/typedapi/rollup/stopjob/response.go old mode 100755 new mode 100644 index 812301036d..27ad385bdb --- a/typedapi/rollup/stopjob/response.go +++ b/typedapi/rollup/stopjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stopjob // Response holds the response body struct for the package stopjob // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/stop_job/StopRollupJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/stop_job/StopRollupJobResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` diff --git a/typedapi/rollup/stopjob/stop_job.go b/typedapi/rollup/stopjob/stop_job.go old mode 100755 new mode 100644 index e13da8854f..f643785df5 --- a/typedapi/rollup/stopjob/stop_job.go +++ b/typedapi/rollup/stopjob/stop_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Stops an existing, started rollup job. 
package stopjob @@ -173,7 +173,6 @@ func (r StopJob) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/searchablesnapshots/cachestats/cache_stats.go b/typedapi/searchablesnapshots/cachestats/cache_stats.go old mode 100755 new mode 100644 index 3363133499..c48ac35d9f --- a/typedapi/searchablesnapshots/cachestats/cache_stats.go +++ b/typedapi/searchablesnapshots/cachestats/cache_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieve node-level cache statistics about searchable snapshots. package cachestats @@ -179,7 +179,6 @@ func (r CacheStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/searchablesnapshots/cachestats/response.go b/typedapi/searchablesnapshots/cachestats/response.go old mode 100755 new mode 100644 index c498242475..f8f65f0a6b --- a/typedapi/searchablesnapshots/cachestats/response.go +++ b/typedapi/searchablesnapshots/cachestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package cachestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package cachestats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/cache_stats/Response.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/cache_stats/Response.ts#L24-L28 type Response struct { Nodes map[string]types.Node `json:"nodes"` diff --git a/typedapi/searchablesnapshots/clearcache/clear_cache.go b/typedapi/searchablesnapshots/clearcache/clear_cache.go old mode 100755 new mode 100644 index 0f00a51742..63f9e441a7 --- a/typedapi/searchablesnapshots/clearcache/clear_cache.go +++ b/typedapi/searchablesnapshots/clearcache/clear_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clear the cache of searchable snapshots. package clearcache @@ -180,7 +180,6 @@ func (r ClearCache) Do(ctx context.Context) (Response, error) { } return *response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/searchablesnapshots/clearcache/response.go b/typedapi/searchablesnapshots/clearcache/response.go old mode 100755 new mode 100644 index 8ef79e947f..36aa803120 --- a/typedapi/searchablesnapshots/clearcache/response.go +++ b/typedapi/searchablesnapshots/clearcache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcache @@ -24,6 +24,10 @@ import "encoding/json" // Response holds the response body struct for the package clearcache // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/clear_cache/SearchableSnapshotsClearCacheResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/clear_cache/SearchableSnapshotsClearCacheResponse.ts#L22-L24 -type Response json.RawMessage +type Response = json.RawMessage + +func NewResponse() *Response { + return new(Response) +} diff --git a/typedapi/searchablesnapshots/mount/mount.go b/typedapi/searchablesnapshots/mount/mount.go old mode 100755 new mode 100644 index d040854659..aefc2a6851 --- a/typedapi/searchablesnapshots/mount/mount.go +++ b/typedapi/searchablesnapshots/mount/mount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Mount a snapshot as a searchable index. package mount @@ -214,7 +214,6 @@ func (r Mount) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/searchablesnapshots/mount/request.go b/typedapi/searchablesnapshots/mount/request.go old mode 100755 new mode 100644 index bcdf9af4d2..e5babfc7f0 --- a/typedapi/searchablesnapshots/mount/request.go +++ b/typedapi/searchablesnapshots/mount/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mount @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package mount // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/mount/SearchableSnapshotsMountRequest.ts#L26-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/mount/SearchableSnapshotsMountRequest.ts#L26-L50 type Request struct { IgnoreIndexSettings []string `json:"ignore_index_settings,omitempty"` Index string `json:"index"` diff --git a/typedapi/searchablesnapshots/mount/response.go b/typedapi/searchablesnapshots/mount/response.go old mode 100755 new mode 100644 index 98e62c272b..91bc4a40d4 --- a/typedapi/searchablesnapshots/mount/response.go +++ b/typedapi/searchablesnapshots/mount/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
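The clearcache response above changes from a defined type over json.RawMessage to a type alias, plus a NewResponse constructor. The distinction matters: a defined type does not carry RawMessage's UnmarshalJSON method and therefore decodes like a plain []byte, whereas an alias is the same type and keeps the raw bytes verbatim. A standalone illustration (type names are hypothetical):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// definedResponse does NOT inherit json.RawMessage's UnmarshalJSON,
// so it decodes like a plain []byte (which expects a base64 string).
type definedResponse json.RawMessage

// aliasResponse is identical to json.RawMessage: raw bytes are kept as-is.
type aliasResponse = json.RawMessage

func main() {
	payload := []byte(`{"acknowledged": true}`)

	var d definedResponse
	fmt.Println(json.Unmarshal(payload, &d)) // error: cannot unmarshal object into []uint8

	var a aliasResponse
	fmt.Println(json.Unmarshal(payload, &a), string(a)) // <nil> {"acknowledged": true}
}
```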
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package mount @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mount // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/mount/SearchableSnapshotsMountResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/mount/SearchableSnapshotsMountResponse.ts#L22-L26 type Response struct { Snapshot types.MountedSnapshot `json:"snapshot"` diff --git a/typedapi/searchablesnapshots/stats/response.go b/typedapi/searchablesnapshots/stats/response.go old mode 100755 new mode 100644 index 3236ff8bda..942c865c2a --- a/typedapi/searchablesnapshots/stats/response.go +++ b/typedapi/searchablesnapshots/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stats @@ -24,7 +24,7 @@ import "encoding/json" // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/stats/SearchableSnapshotsStatsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/stats/SearchableSnapshotsStatsResponse.ts#L22-L27 type Response struct { Stats json.RawMessage `json:"stats,omitempty"` diff --git a/typedapi/searchablesnapshots/stats/stats.go b/typedapi/searchablesnapshots/stats/stats.go old mode 100755 new mode 100644 index 3a5b7e957d..7eadaebfca --- a/typedapi/searchablesnapshots/stats/stats.go +++ b/typedapi/searchablesnapshots/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieve shard-level statistics about searchable snapshots. package stats @@ -177,7 +177,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/searchapplication/delete/delete.go b/typedapi/searchapplication/delete/delete.go new file mode 100644 index 0000000000..b083e33bb4 --- /dev/null +++ b/typedapi/searchapplication/delete/delete.go @@ -0,0 +1,219 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Deletes a search application. +package delete + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + nameMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Delete struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + paramSet int + + name string +} + +// NewDelete type alias for index. +type NewDelete func(name string) *Delete + +// NewDeleteFunc returns a new instance of Delete with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewDeleteFunc(tp elastictransport.Interface) NewDelete { + return func(name string) *Delete { + n := New(tp) + + n.Name(name) + + return n + } +} + +// Deletes a search application. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/master/put-search-application.html +func New(tp elastictransport.Interface) *Delete { + r := &Delete{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *Delete) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == nameMask: + path.WriteString("/") + path.WriteString("_application") + path.WriteString("/") + path.WriteString("search_application") + path.WriteString("/") + + path.WriteString(r.name) + + method = http.MethodDelete + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r Delete) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the Delete query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a delete.Response +func (r Delete) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r Delete) IsSuccess(ctx context.Context) (bool, error) { + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(ioutil.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + return false, nil +} + +// Header set a key, value pair in the Delete headers map. +func (r *Delete) Header(key, value string) *Delete { + r.headers.Set(key, value) + + return r +} + +// Name The name of the search application to delete +// API Name: name +func (r *Delete) Name(v string) *Delete { + r.paramSet |= nameMask + r.name = v + + return r +} diff --git a/typedapi/cluster/deletevotingconfigexclusions/response.go b/typedapi/searchapplication/delete/response.go old mode 100755 new mode 100644 similarity index 69% rename from typedapi/cluster/deletevotingconfigexclusions/response.go rename to typedapi/searchapplication/delete/response.go index d5192fd91b..916b842dbe --- a/typedapi/cluster/deletevotingconfigexclusions/response.go +++ b/typedapi/searchapplication/delete/response.go @@ -16,15 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 -package deletevotingconfigexclusions +package delete -// Response holds the response body struct for the package deletevotingconfigexclusions +// Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/delete_voting_config_exclusions/ClusterDeleteVotingConfigExclusionsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/delete/SearchApplicationsDeleteResponse.ts#L22-L24 type Response struct { + + // Acknowledged For a successful response, this value is always true. On failure, an + // exception is returned instead. 
+ Acknowledged bool `json:"acknowledged"` } // NewResponse returns a Response diff --git a/typedapi/searchapplication/get/get.go b/typedapi/searchapplication/get/get.go new file mode 100644 index 0000000000..6e334628d8 --- /dev/null +++ b/typedapi/searchapplication/get/get.go @@ -0,0 +1,219 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Returns the details about a search application. +package get + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + nameMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Get struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + paramSet int + + name string +} + +// NewGet type alias for index. +type NewGet func(name string) *Get + +// NewGetFunc returns a new instance of Get with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewGetFunc(tp elastictransport.Interface) NewGet { + return func(name string) *Get { + n := New(tp) + + n.Name(name) + + return n + } +} + +// Returns the details about a search application. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/master/get-search-application.html +func New(tp elastictransport.Interface) *Get { + r := &Get{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. 
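+// The request resolves to GET /_application/search_application/<name>; the name
+// path parameter is required and is set through the constructor returned by
+// NewGetFunc. Illustrative sketch only (tp is a hypothetical
+// elastictransport.Interface):
+//
+//	app, err := NewGetFunc(tp)("my-search-app").Do(context.Background())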
+func (r *Get) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == nameMask: + path.WriteString("/") + path.WriteString("_application") + path.WriteString("/") + path.WriteString("search_application") + path.WriteString("/") + + path.WriteString(r.name) + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r Get) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the Get query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a get.Response +func (r Get) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r Get) IsSuccess(ctx context.Context) (bool, error) { + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(ioutil.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + return false, nil +} + +// Header set a key, value pair in the Get headers map. +func (r *Get) Header(key, value string) *Get { + r.headers.Set(key, value) + + return r +} + +// Name The name of the search application +// API Name: name +func (r *Get) Name(v string) *Get { + r.paramSet |= nameMask + r.name = v + + return r +} diff --git a/typedapi/searchapplication/get/response.go b/typedapi/searchapplication/get/response.go new file mode 100644 index 0000000000..2dafbf03b0 --- /dev/null +++ b/typedapi/searchapplication/get/response.go @@ -0,0 +1,49 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package get + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// Response holds the response body struct for the package get +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/get/SearchApplicationsGetResponse.ts#L22-L24 + +type Response struct { + + // AnalyticsCollectionName Analytics collection associated to the Search Application + AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` + // Indices Indices that are part of the Search Application + Indices []string `json:"indices"` + // Name Search Application name + Name string `json:"name"` + // Template Search template to use on search operations + Template *types.SearchApplicationTemplate `json:"template,omitempty"` + // UpdatedAtMillis Last time the Search Application was updated + UpdatedAtMillis int64 `json:"updated_at_millis"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/searchapplication/list/list.go b/typedapi/searchapplication/list/list.go new file mode 100644 index 0000000000..7ccb944d40 --- /dev/null +++ b/typedapi/searchapplication/list/list.go @@ -0,0 +1,224 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Returns the existing search applications. +package list + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type List struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + paramSet int +} + +// NewList type alias for index. +type NewList func() *List + +// NewListFunc returns a new instance of List with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewListFunc(tp elastictransport.Interface) NewList { + return func() *List { + n := New(tp) + + return n + } +} + +// Returns the existing search applications. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/master/list-search-applications.html +func New(tp elastictransport.Interface) *List { + r := &List{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *List) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_application") + path.WriteString("/") + path.WriteString("search_application") + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r List) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the List query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a list.Response +func (r List) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. 
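+// A true return only means the endpoint answered with a 2xx status; the response
+// body is drained and closed without being decoded. To page through applications
+// with the typed response, use Do together with the From and Size query
+// parameters, for example (tp is a hypothetical elastictransport.Interface):
+//
+//	page, err := NewListFunc(tp)().From(0).Size(20).Do(context.Background())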
+func (r List) IsSuccess(ctx context.Context) (bool, error) { + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(ioutil.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + return false, nil +} + +// Header set a key, value pair in the List headers map. +func (r *List) Header(key, value string) *List { + r.headers.Set(key, value) + + return r +} + +// Q Query in the Lucene query string syntax" +// API name: q +func (r *List) Q(v string) *List { + r.values.Set("q", v) + + return r +} + +// From Starting offset (default: 0) +// API name: from +func (r *List) From(i int) *List { + r.values.Set("from", strconv.Itoa(i)) + + return r +} + +// Size specifies a max number of results to get +// API name: size +func (r *List) Size(i int) *List { + r.values.Set("size", strconv.Itoa(i)) + + return r +} diff --git a/typedapi/searchapplication/list/response.go b/typedapi/searchapplication/list/response.go new file mode 100644 index 0000000000..7afe33a8e2 --- /dev/null +++ b/typedapi/searchapplication/list/response.go @@ -0,0 +1,40 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package list + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// Response holds the response body struct for the package list +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/list/SearchApplicationsListResponse.ts#L24-L29 + +type Response struct { + Count int64 `json:"count"` + Results []types.SearchApplicationListItem `json:"results"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/searchapplication/put/put.go b/typedapi/searchapplication/put/put.go new file mode 100644 index 0000000000..40f4c6dad6 --- /dev/null +++ b/typedapi/searchapplication/put/put.go @@ -0,0 +1,243 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Creates or updates a search application. +package put + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + nameMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Put struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + req *types.SearchApplication + raw io.Reader + + paramSet int + + name string +} + +// NewPut type alias for index. +type NewPut func(name string) *Put + +// NewPutFunc returns a new instance of Put with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewPutFunc(tp elastictransport.Interface) NewPut { + return func(name string) *Put { + n := New(tp) + + n.Name(name) + + return n + } +} + +// Creates or updates a search application. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/master/put-search-application.html +func New(tp elastictransport.Interface) *Put { + r := &Put{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// Raw takes a json payload as input which is then passed to the http.Request +// If specified Raw takes precedence on Request method. +func (r *Put) Raw(raw io.Reader) *Put { + r.raw = raw + + return r +} + +// Request allows to set the request property with the appropriate payload. +func (r *Put) Request(req *types.SearchApplication) *Put { + r.req = req + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. 
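+// When both Raw and Request are set, the raw reader takes precedence and is
+// copied into the request body as-is; otherwise the *types.SearchApplication
+// payload is JSON serialised. A minimal sketch, assuming a JSON body with an
+// "indices" field and a hypothetical transport tp:
+//
+//	res, err := NewPutFunc(tp)("my-search-app").
+//		Raw(strings.NewReader(`{"indices": ["my-index"]}`)).
+//		Create(true).
+//		Do(context.Background())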
+func (r *Put) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + if r.raw != nil { + r.buf.ReadFrom(r.raw) + } else if r.req != nil { + data, err := json.Marshal(r.req) + + if err != nil { + return nil, fmt.Errorf("could not serialise request for Put: %w", err) + } + + r.buf.Write(data) + } + + r.path.Scheme = "http" + + switch { + case r.paramSet == nameMask: + path.WriteString("/") + path.WriteString("_application") + path.WriteString("/") + path.WriteString("search_application") + path.WriteString("/") + + path.WriteString(r.name) + + method = http.MethodPut + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.buf.Len() > 0 { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r Put) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the Put query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a put.Response +func (r Put) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// Header set a key, value pair in the Put headers map. +func (r *Put) Header(key, value string) *Put { + r.headers.Set(key, value) + + return r +} + +// Name The name of the search application to be created or updated +// API Name: name +func (r *Put) Name(v string) *Put { + r.paramSet |= nameMask + r.name = v + + return r +} + +// Create If true, requires that a search application with the specified resource_id +// does not already exist. (default: false) +// API name: create +func (r *Put) Create(b bool) *Put { + r.values.Set("create", strconv.FormatBool(b)) + + return r +} diff --git a/typedapi/cluster/existscomponenttemplate/response.go b/typedapi/searchapplication/put/response.go old mode 100755 new mode 100644 similarity index 67% rename from typedapi/cluster/existscomponenttemplate/response.go rename to typedapi/searchapplication/put/response.go index 4b4f885cbf..08267d6146 --- a/typedapi/cluster/existscomponenttemplate/response.go +++ b/typedapi/searchapplication/put/response.go @@ -16,15 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 -package existscomponenttemplate +package put -// Response holds the response body struct for the package existscomponenttemplate +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/result" +) + +// Response holds the response body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/exists_component_template/ClusterComponentTemplateExistsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/put/SearchApplicationsPutResponse.ts#L22-L26 type Response struct { + Result result.Result `json:"result"` } // NewResponse returns a Response diff --git a/typedapi/searchapplication/search/request.go b/typedapi/searchapplication/search/request.go new file mode 100644 index 0000000000..16ffe03a63 --- /dev/null +++ b/typedapi/searchapplication/search/request.go @@ -0,0 +1,53 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package search + +import ( + "encoding/json" + "fmt" +) + +// Request holds the request body struct for the package search +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/search/SearchApplicationsSearchRequest.ts#L24-L40 +type Request struct { + Params map[string]json.RawMessage `json:"params,omitempty"` +} + +// NewRequest returns a Request +func NewRequest() *Request { + r := &Request{ + Params: make(map[string]json.RawMessage, 0), + } + return r +} + +// FromJSON allows to load an arbitrary json into the request structure +func (r *Request) FromJSON(data string) (*Request, error) { + var req Request + err := json.Unmarshal([]byte(data), &req) + + if err != nil { + return nil, fmt.Errorf("could not deserialise json into Search request: %w", err) + } + + return &req, nil +} diff --git a/typedapi/searchapplication/search/response.go b/typedapi/searchapplication/search/response.go new file mode 100644 index 0000000000..4f91ff1d93 --- /dev/null +++ b/typedapi/searchapplication/search/response.go @@ -0,0 +1,727 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package search + +import ( + "bytes" + "encoding/json" + "errors" + "io" + "strconv" + "strings" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// Response holds the response body struct for the package search +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/search/SearchApplicationsSearchResponse.ts#L23-L25 + +type Response struct { + Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` + Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` + Fields map[string]json.RawMessage `json:"fields,omitempty"` + Hits types.HitsMetadata `json:"hits"` + MaxScore *types.Float64 `json:"max_score,omitempty"` + NumReducePhases *int64 `json:"num_reduce_phases,omitempty"` + PitId *string `json:"pit_id,omitempty"` + Profile *types.Profile `json:"profile,omitempty"` + ScrollId_ *string `json:"_scroll_id,omitempty"` + Shards_ types.ShardStatistics `json:"_shards"` + Suggest map[string][]types.Suggest `json:"suggest,omitempty"` + TerminatedEarly *bool `json:"terminated_early,omitempty"` + TimedOut bool `json:"timed_out"` + Took int64 `json:"took"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{ + Aggregations: make(map[string]types.Aggregate, 0), + Fields: make(map[string]json.RawMessage, 0), + Suggest: make(map[string][]types.Suggest, 0), + } + return r +} + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + + for dec.More() { + tt, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + if value, ok := tt.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := types.NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := types.NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := types.NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := types.NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = 
o + + case "tdigest_percentile_ranks": + o := types.NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := types.NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := types.NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := types.NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := types.NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := types.NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := types.NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := types.NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := types.NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := types.NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := types.NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := types.NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := types.NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := types.NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := types.NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := types.NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := types.NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := types.NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := types.NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := types.NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := types.NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := types.NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := types.NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o 
+ + case "lterms": + o := types.NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := types.NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := types.NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := types.NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := types.NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := types.NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := types.NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := types.NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := types.NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := types.NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := types.NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := types.NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := types.NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := types.NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := types.NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := types.NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := types.NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := types.NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := types.NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := types.NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := types.NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := types.NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := types.NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := types.NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "filters": + o := types.NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := types.NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := types.NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := types.NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := types.NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := types.NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := types.NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := types.NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := types.NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := types.NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := types.NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := types.NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := types.NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := types.NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := types.NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := types.NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := types.NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := types.NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } + + case "_clusters": + if err := dec.Decode(&s.Clusters_); err != nil { + return err + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "hits": + if err := dec.Decode(&s.Hits); err != nil { + return err + } + + case "max_score": + var tmp interface{} + dec.Decode(&tmp) + switch v 
:= tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := types.Float64(value) + s.MaxScore = &f + case float64: + f := types.Float64(v) + s.MaxScore = &f + } + + case "num_reduce_phases": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f + } + + case "pit_id": + if err := dec.Decode(&s.PitId); err != nil { + return err + } + + case "profile": + if err := dec.Decode(&s.Profile); err != nil { + return err + } + + case "_scroll_id": + if err := dec.Decode(&s.ScrollId_); err != nil { + return err + } + + case "_shards": + if err := dec.Decode(&s.Shards_); err != nil { + return err + } + + case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]types.Suggest, 0) + } + if err := dec.Decode(&s.Suggest); err != nil { + return err + } + + case "terminated_early": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v + } + + case "timed_out": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v + } + + case "took": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f + } + + } + } + return nil +} diff --git a/typedapi/searchapplication/search/search.go b/typedapi/searchapplication/search/search.go new file mode 100644 index 0000000000..b3fd132810 --- /dev/null +++ b/typedapi/searchapplication/search/search.go @@ -0,0 +1,235 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Perform a search against a search application +package search + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + nameMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Search struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + req *Request + raw io.Reader + + paramSet int + + name string +} + +// NewSearch type alias for index. +type NewSearch func(name string) *Search + +// NewSearchFunc returns a new instance of Search with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewSearchFunc(tp elastictransport.Interface) NewSearch { + return func(name string) *Search { + n := New(tp) + + n.Name(name) + + return n + } +} + +// Perform a search against a search application +// +// https://www.elastic.co/guide/en/elasticsearch/reference/master/search-application-search.html +func New(tp elastictransport.Interface) *Search { + r := &Search{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// Raw takes a json payload as input which is then passed to the http.Request +// If specified Raw takes precedence on Request method. +func (r *Search) Raw(raw io.Reader) *Search { + r.raw = raw + + return r +} + +// Request allows to set the request property with the appropriate payload. +func (r *Search) Request(req *Request) *Search { + r.req = req + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *Search) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + if r.raw != nil { + r.buf.ReadFrom(r.raw) + } else if r.req != nil { + data, err := json.Marshal(r.req) + + if err != nil { + return nil, fmt.Errorf("could not serialise request for Search: %w", err) + } + + r.buf.Write(data) + } + + r.path.Scheme = "http" + + switch { + case r.paramSet == nameMask: + path.WriteString("/") + path.WriteString("_application") + path.WriteString("/") + path.WriteString("search_application") + path.WriteString("/") + + path.WriteString(r.name) + path.WriteString("/") + path.WriteString("_search") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.buf.Len() > 0 { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
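+// As with the other endpoints, Do is the usual entry point: it POSTs the optional
+// Params body to /_application/search_application/<name>/_search and decodes the
+// hits, aggregations and metadata into a typed search.Response. A sketch, assuming
+// the target search template accepts a "query_string" parameter (tp is a
+// hypothetical elastictransport.Interface):
+//
+//	req := NewRequest()
+//	req.Params["query_string"] = json.RawMessage(`"elastic"`)
+//	res, err := NewSearchFunc(tp)("my-search-app").Request(req).Do(context.Background())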
+func (r Search) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the Search query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a search.Response +func (r Search) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// Header set a key, value pair in the Search headers map. +func (r *Search) Header(key, value string) *Search { + r.headers.Set(key, value) + + return r +} + +// Name The name of the search application to be searched +// API Name: name +func (r *Search) Name(v string) *Search { + r.paramSet |= nameMask + r.name = v + + return r +} diff --git a/typedapi/security/activateuserprofile/activate_user_profile.go b/typedapi/security/activateuserprofile/activate_user_profile.go old mode 100755 new mode 100644 index 9616123999..6a6b3f0adc --- a/typedapi/security/activateuserprofile/activate_user_profile.go +++ b/typedapi/security/activateuserprofile/activate_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates the user profile on behalf of another user. package activateuserprofile @@ -196,7 +196,6 @@ func (r ActivateUserProfile) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/activateuserprofile/request.go b/typedapi/security/activateuserprofile/request.go old mode 100755 new mode 100644 index 67673c2df0..9daf3716c0 --- a/typedapi/security/activateuserprofile/request.go +++ b/typedapi/security/activateuserprofile/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package activateuserprofile @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package activateuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/activate_user_profile/Request.ts#L23-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/activate_user_profile/Request.ts#L23-L37 type Request struct { AccessToken *string `json:"access_token,omitempty"` GrantType granttype.GrantType `json:"grant_type"` diff --git a/typedapi/security/activateuserprofile/response.go b/typedapi/security/activateuserprofile/response.go old mode 100755 new mode 100644 index ce9792f633..eb6812e049 --- a/typedapi/security/activateuserprofile/response.go +++ b/typedapi/security/activateuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package activateuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package activateuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/activate_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/activate_user_profile/Response.ts#L22-L24 type Response struct { Doc_ types.UserProfileHitMetadata `json:"_doc"` diff --git a/typedapi/security/authenticate/authenticate.go b/typedapi/security/authenticate/authenticate.go old mode 100755 new mode 100644 index 11aa82e2c4..b6e38fb8cf --- a/typedapi/security/authenticate/authenticate.go +++ b/typedapi/security/authenticate/authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Enables authentication as a user and retrieve information about the // authenticated user. @@ -161,7 +161,6 @@ func (r Authenticate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/authenticate/response.go b/typedapi/security/authenticate/response.go old mode 100755 new mode 100644 index 7c93c9db08..ab1881ea5b --- a/typedapi/security/authenticate/response.go +++ b/typedapi/security/authenticate/response.go @@ -16,32 +16,30 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package authenticate import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Response holds the response body struct for the package authenticate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/authenticate/SecurityAuthenticateResponse.ts#L25-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/authenticate/SecurityAuthenticateResponse.ts#L25-L40 type Response struct { - ApiKey *types.ApiKey `json:"api_key,omitempty"` - AuthenticationRealm types.RealmInfo `json:"authentication_realm"` - AuthenticationType string `json:"authentication_type"` - Email string `json:"email,omitempty"` - Enabled bool `json:"enabled"` - FullName string `json:"full_name,omitempty"` - LookupRealm types.RealmInfo `json:"lookup_realm"` - Metadata map[string]json.RawMessage `json:"metadata"` - Roles []string `json:"roles"` - Token *types.AuthenticateToken `json:"token,omitempty"` - Username string `json:"username"` + ApiKey *types.ApiKey `json:"api_key,omitempty"` + AuthenticationRealm types.RealmInfo `json:"authentication_realm"` + AuthenticationType string `json:"authentication_type"` + Email string `json:"email,omitempty"` + Enabled bool `json:"enabled"` + FullName string `json:"full_name,omitempty"` + LookupRealm types.RealmInfo `json:"lookup_realm"` + Metadata types.Metadata `json:"metadata"` + Roles []string `json:"roles"` + Token *types.AuthenticateToken `json:"token,omitempty"` + Username string `json:"username"` } // NewResponse returns a Response diff --git a/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go b/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go old mode 100755 new mode 100644 index 1d38d58a35..b7f6a857ed --- a/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go +++ b/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates the attributes of multiple existing API keys. package bulkupdateapikeys diff --git a/typedapi/security/changepassword/change_password.go b/typedapi/security/changepassword/change_password.go old mode 100755 new mode 100644 index 5dc33f49b1..8cdd702b1e --- a/typedapi/security/changepassword/change_password.go +++ b/typedapi/security/changepassword/change_password.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Changes the passwords of users in the native realm and built-in users. 
package changepassword @@ -216,7 +216,6 @@ func (r ChangePassword) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/changepassword/request.go b/typedapi/security/changepassword/request.go old mode 100755 new mode 100644 index 82c8aa8422..49c75ecff1 --- a/typedapi/security/changepassword/request.go +++ b/typedapi/security/changepassword/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package changepassword @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package changepassword // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/change_password/SecurityChangePasswordRequest.ts#L23-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/change_password/SecurityChangePasswordRequest.ts#L23-L52 type Request struct { // Password The new password value. Passwords must be at least 6 characters long. diff --git a/typedapi/security/changepassword/response.go b/typedapi/security/changepassword/response.go old mode 100755 new mode 100644 index 8514b2d1f3..d82d7ff3da --- a/typedapi/security/changepassword/response.go +++ b/typedapi/security/changepassword/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package changepassword // Response holds the response body struct for the package changepassword // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/change_password/SecurityChangePasswordResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/change_password/SecurityChangePasswordResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/clearapikeycache/clear_api_key_cache.go b/typedapi/security/clearapikeycache/clear_api_key_cache.go old mode 100755 new mode 100644 index 66e59eac3a..cd347b4e22 --- a/typedapi/security/clearapikeycache/clear_api_key_cache.go +++ b/typedapi/security/clearapikeycache/clear_api_key_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clear a subset or all entries from the API key cache. 
package clearapikeycache @@ -172,7 +172,6 @@ func (r ClearApiKeyCache) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/clearapikeycache/response.go b/typedapi/security/clearapikeycache/response.go old mode 100755 new mode 100644 index 9093f0b9bf..5d3b19c772 --- a/typedapi/security/clearapikeycache/response.go +++ b/typedapi/security/clearapikeycache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearapikeycache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearapikeycache // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/clear_api_key_cache/SecurityClearApiKeyCacheResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/clear_api_key_cache/SecurityClearApiKeyCacheResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/security/clearcachedprivileges/clear_cached_privileges.go b/typedapi/security/clearcachedprivileges/clear_cached_privileges.go old mode 100755 new mode 100644 index 0882013232..465621770e --- a/typedapi/security/clearcachedprivileges/clear_cached_privileges.go +++ b/typedapi/security/clearcachedprivileges/clear_cached_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Evicts application privileges from the native application privileges cache. package clearcachedprivileges @@ -172,7 +172,6 @@ func (r ClearCachedPrivileges) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/clearcachedprivileges/response.go b/typedapi/security/clearcachedprivileges/response.go old mode 100755 new mode 100644 index 840d920680..06de58d965 --- a/typedapi/security/clearcachedprivileges/response.go +++ b/typedapi/security/clearcachedprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcachedprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/clear_cached_privileges/SecurityClearCachedPrivilegesResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/clear_cached_privileges/SecurityClearCachedPrivilegesResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/security/clearcachedrealms/clear_cached_realms.go b/typedapi/security/clearcachedrealms/clear_cached_realms.go old mode 100755 new mode 100644 index fadfd159b3..e113e0c0f4 --- a/typedapi/security/clearcachedrealms/clear_cached_realms.go +++ b/typedapi/security/clearcachedrealms/clear_cached_realms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Evicts users from the user cache. Can completely clear the cache or evict // specific users. @@ -174,7 +174,6 @@ func (r ClearCachedRealms) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/clearcachedrealms/response.go b/typedapi/security/clearcachedrealms/response.go old mode 100755 new mode 100644 index 77f8276d70..531924b1fa --- a/typedapi/security/clearcachedrealms/response.go +++ b/typedapi/security/clearcachedrealms/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcachedrealms @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedrealms // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/clear_cached_realms/SecurityClearCachedRealmsResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/clear_cached_realms/SecurityClearCachedRealmsResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/security/clearcachedroles/clear_cached_roles.go b/typedapi/security/clearcachedroles/clear_cached_roles.go old mode 100755 new mode 100644 index 0acb7283f7..3bcfe35413 --- a/typedapi/security/clearcachedroles/clear_cached_roles.go +++ b/typedapi/security/clearcachedroles/clear_cached_roles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Evicts roles from the native role cache. package clearcachedroles @@ -172,7 +172,6 @@ func (r ClearCachedRoles) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/clearcachedroles/response.go b/typedapi/security/clearcachedroles/response.go old mode 100755 new mode 100644 index dec4dfd171..0634ccb775 --- a/typedapi/security/clearcachedroles/response.go +++ b/typedapi/security/clearcachedroles/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcachedroles @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedroles // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/clear_cached_roles/ClearCachedRolesResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/clear_cached_roles/ClearCachedRolesResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` diff --git a/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go b/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go old mode 100755 new mode 100644 index 7a13a4c092..c16f63029b --- a/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go +++ b/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Evicts tokens from the service account token caches. package clearcachedservicetokens @@ -192,7 +192,6 @@ func (r ClearCachedServiceTokens) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/clearcachedservicetokens/response.go b/typedapi/security/clearcachedservicetokens/response.go old mode 100755 new mode 100644 index fbb891b88f..07f012bf5c --- a/typedapi/security/clearcachedservicetokens/response.go +++ b/typedapi/security/clearcachedservicetokens/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package createapikey

@@ -29,7 +29,7 @@ import (

 // Request holds the request body struct for the package createapikey
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/create_api_key/SecurityCreateApiKeyRequest.ts#L26-L51
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/create_api_key/SecurityCreateApiKeyRequest.ts#L26-L51
 type Request struct {

 	// Expiration Expiration time for the API key. By default, API keys never expire.
@@ -37,7 +37,7 @@ type Request struct {
 	// Metadata Arbitrary metadata that you want to associate with the API key. It supports
 	// nested data structure. Within the metadata object, keys beginning with _ are
 	// reserved for system usage.
-	Metadata map[string]json.RawMessage `json:"metadata,omitempty"`
+	Metadata types.Metadata `json:"metadata,omitempty"`
 	// Name Specifies the name for this API key.
 	Name *string `json:"name,omitempty"`
 	// RoleDescriptors An array of role descriptors for this API key. This parameter is optional.
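The hunk above retypes the createapikey request's Metadata field from map[string]json.RawMessage to types.Metadata. A minimal sketch of what calling code might look like against the new field type, assuming types.Metadata is still a string-keyed map of raw JSON values as the replaced type suggests; the key name and metadata values below are invented for illustration:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/createapikey"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	name := "my-ingest-key" // hypothetical API key name
	req := createapikey.Request{
		Name: &name,
		// Keys beginning with _ are reserved for system usage.
		Metadata: types.Metadata{
			"team":        json.RawMessage(`"search-platform"`),
			"environment": json.RawMessage(`"staging"`),
		},
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // roughly the request body the typed client would send
}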
diff --git a/typedapi/security/createapikey/response.go b/typedapi/security/createapikey/response.go old mode 100755 new mode 100644 index 7a643cbd22..d1d0898c21 --- a/typedapi/security/createapikey/response.go +++ b/typedapi/security/createapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package createapikey // Response holds the response body struct for the package createapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/create_api_key/SecurityCreateApiKeyResponse.ts#L23-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/create_api_key/SecurityCreateApiKeyResponse.ts#L23-L49 type Response struct { diff --git a/typedapi/security/createservicetoken/create_service_token.go b/typedapi/security/createservicetoken/create_service_token.go old mode 100755 new mode 100644 index d0ea46c303..63e9a537cb --- a/typedapi/security/createservicetoken/create_service_token.go +++ b/typedapi/security/createservicetoken/create_service_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a service account token for access without requiring basic // authentication. @@ -209,7 +209,6 @@ func (r CreateServiceToken) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/createservicetoken/response.go b/typedapi/security/createservicetoken/response.go old mode 100755 new mode 100644 index 13400f28fa..a2d18f8271 --- a/typedapi/security/createservicetoken/response.go +++ b/typedapi/security/createservicetoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package createservicetoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package createservicetoken // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/create_service_token/CreateServiceTokenResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/create_service_token/CreateServiceTokenResponse.ts#L22-L27 type Response struct { Created bool `json:"created"` diff --git a/typedapi/security/deleteprivileges/delete_privileges.go b/typedapi/security/deleteprivileges/delete_privileges.go old mode 100755 new mode 100644 index 50c27a0c0e..851cd877d9 --- a/typedapi/security/deleteprivileges/delete_privileges.go +++ b/typedapi/security/deleteprivileges/delete_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes application privileges. package deleteprivileges @@ -180,7 +180,6 @@ func (r DeletePrivileges) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/deleteprivileges/response.go b/typedapi/security/deleteprivileges/response.go old mode 100755 new mode 100644 index d4c21ca042..e3d0655fac --- a/typedapi/security/deleteprivileges/response.go +++ b/typedapi/security/deleteprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deleteprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/delete_privileges/SecurityDeletePrivilegesResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/delete_privileges/SecurityDeletePrivilegesResponse.ts#L23-L25 type Response map[string]map[string]types.FoundStatus diff --git a/typedapi/security/deleterole/delete_role.go b/typedapi/security/deleterole/delete_role.go old mode 100755 new mode 100644 index 4914f39b30..bc7cc3cd48 --- a/typedapi/security/deleterole/delete_role.go +++ b/typedapi/security/deleterole/delete_role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes roles in the native realm. package deleterole @@ -172,7 +172,6 @@ func (r DeleteRole) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/deleterole/response.go b/typedapi/security/deleterole/response.go old mode 100755 new mode 100644 index 9e14d00e2c..42398f2bbb --- a/typedapi/security/deleterole/response.go +++ b/typedapi/security/deleterole/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleterole // Response holds the response body struct for the package deleterole // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/delete_role/SecurityDeleteRoleResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/delete_role/SecurityDeleteRoleResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` diff --git a/typedapi/security/deleterolemapping/delete_role_mapping.go b/typedapi/security/deleterolemapping/delete_role_mapping.go old mode 100755 new mode 100644 index 0791fc0e43..8054e29fda --- a/typedapi/security/deleterolemapping/delete_role_mapping.go +++ b/typedapi/security/deleterolemapping/delete_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes role mappings. package deleterolemapping @@ -172,7 +172,6 @@ func (r DeleteRoleMapping) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/deleterolemapping/response.go b/typedapi/security/deleterolemapping/response.go old mode 100755 new mode 100644 index 1408018ffc..37b6548fba --- a/typedapi/security/deleterolemapping/response.go +++ b/typedapi/security/deleterolemapping/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleterolemapping // Response holds the response body struct for the package deleterolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/delete_role_mapping/SecurityDeleteRoleMappingResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/delete_role_mapping/SecurityDeleteRoleMappingResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` diff --git a/typedapi/security/deleteservicetoken/delete_service_token.go b/typedapi/security/deleteservicetoken/delete_service_token.go old mode 100755 new mode 100644 index e6e9935328..95765d8b06 --- a/typedapi/security/deleteservicetoken/delete_service_token.go +++ b/typedapi/security/deleteservicetoken/delete_service_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a service account token. 
package deleteservicetoken @@ -192,7 +192,6 @@ func (r DeleteServiceToken) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/deleteservicetoken/response.go b/typedapi/security/deleteservicetoken/response.go old mode 100755 new mode 100644 index 97364d05c0..35eae088a7 --- a/typedapi/security/deleteservicetoken/response.go +++ b/typedapi/security/deleteservicetoken/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteservicetoken // Response holds the response body struct for the package deleteservicetoken // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/delete_service_token/DeleteServiceTokenResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/delete_service_token/DeleteServiceTokenResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` diff --git a/typedapi/security/deleteuser/delete_user.go b/typedapi/security/deleteuser/delete_user.go old mode 100755 new mode 100644 index e46b00d8d9..1d73c28366 --- a/typedapi/security/deleteuser/delete_user.go +++ b/typedapi/security/deleteuser/delete_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes users from the native realm. package deleteuser @@ -172,7 +172,6 @@ func (r DeleteUser) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/deleteuser/response.go b/typedapi/security/deleteuser/response.go old mode 100755 new mode 100644 index 9d0c7f2da1..39650b3f38 --- a/typedapi/security/deleteuser/response.go +++ b/typedapi/security/deleteuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteuser // Response holds the response body struct for the package deleteuser // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/delete_user/SecurityDeleteUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/delete_user/SecurityDeleteUserResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` diff --git a/typedapi/security/disableuser/disable_user.go b/typedapi/security/disableuser/disable_user.go old mode 100755 new mode 100644 index 52b2b0eef7..424d4b3efc --- a/typedapi/security/disableuser/disable_user.go +++ b/typedapi/security/disableuser/disable_user.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Disables users in the native realm. package disableuser @@ -174,7 +174,6 @@ func (r DisableUser) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/disableuser/response.go b/typedapi/security/disableuser/response.go old mode 100755 new mode 100644 index 15665e0abf..5b7c2decdd --- a/typedapi/security/disableuser/response.go +++ b/typedapi/security/disableuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package disableuser // Response holds the response body struct for the package disableuser // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/disable_user/SecurityDisableUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/disable_user/SecurityDisableUserResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/disableuserprofile/disable_user_profile.go b/typedapi/security/disableuserprofile/disable_user_profile.go old mode 100755 new mode 100644 index f922fed5a2..65d9df071c --- a/typedapi/security/disableuserprofile/disable_user_profile.go +++ b/typedapi/security/disableuserprofile/disable_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Disables a user profile so it's not visible in user profile searches. package disableuserprofile @@ -174,7 +174,6 @@ func (r DisableUserProfile) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/disableuserprofile/response.go b/typedapi/security/disableuserprofile/response.go old mode 100755 new mode 100644 index fe05e80fb3..fa021b39ca --- a/typedapi/security/disableuserprofile/response.go +++ b/typedapi/security/disableuserprofile/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package disableuserprofile // Response holds the response body struct for the package disableuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/disable_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/disable_user_profile/Response.ts#L22-L24 type Response struct { diff --git a/typedapi/security/enableuser/enable_user.go b/typedapi/security/enableuser/enable_user.go old mode 100755 new mode 100644 index e95e49f343..431687f54f --- a/typedapi/security/enableuser/enable_user.go +++ b/typedapi/security/enableuser/enable_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Enables users in the native realm. package enableuser @@ -174,7 +174,6 @@ func (r EnableUser) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/enableuser/response.go b/typedapi/security/enableuser/response.go old mode 100755 new mode 100644 index da065cb7a5..bcfecf99a9 --- a/typedapi/security/enableuser/response.go +++ b/typedapi/security/enableuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package enableuser // Response holds the response body struct for the package enableuser // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/enable_user/SecurityEnableUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/enable_user/SecurityEnableUserResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/enableuserprofile/enable_user_profile.go b/typedapi/security/enableuserprofile/enable_user_profile.go old mode 100755 new mode 100644 index 6687422c3e..4687af84f6 --- a/typedapi/security/enableuserprofile/enable_user_profile.go +++ b/typedapi/security/enableuserprofile/enable_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Enables a user profile so it's visible in user profile searches. 
package enableuserprofile @@ -174,7 +174,6 @@ func (r EnableUserProfile) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/enableuserprofile/response.go b/typedapi/security/enableuserprofile/response.go old mode 100755 new mode 100644 index 109f251508..89e1f714c2 --- a/typedapi/security/enableuserprofile/response.go +++ b/typedapi/security/enableuserprofile/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package enableuserprofile // Response holds the response body struct for the package enableuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/enable_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/enable_user_profile/Response.ts#L22-L24 type Response struct { diff --git a/typedapi/security/enrollkibana/enroll_kibana.go b/typedapi/security/enrollkibana/enroll_kibana.go old mode 100755 new mode 100644 index b129cf3b6c..19374ccba6 --- a/typedapi/security/enrollkibana/enroll_kibana.go +++ b/typedapi/security/enrollkibana/enroll_kibana.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows a kibana instance to configure itself to communicate with a secured // elasticsearch cluster. @@ -169,7 +169,6 @@ func (r EnrollKibana) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/enrollkibana/response.go b/typedapi/security/enrollkibana/response.go old mode 100755 new mode 100644 index 02d155a269..ff029c3ecf --- a/typedapi/security/enrollkibana/response.go +++ b/typedapi/security/enrollkibana/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package enrollkibana @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package enrollkibana // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/enroll_kibana/Response.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/enroll_kibana/Response.ts#L20-L25 type Response struct { HttpCa string `json:"http_ca"` diff --git a/typedapi/security/enrollnode/enroll_node.go b/typedapi/security/enrollnode/enroll_node.go old mode 100755 new mode 100644 index dae01aa116..5f30b0f117 --- a/typedapi/security/enrollnode/enroll_node.go +++ b/typedapi/security/enrollnode/enroll_node.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Allows a new node to enroll to an existing cluster with security enabled. package enrollnode @@ -167,7 +167,6 @@ func (r EnrollNode) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/enrollnode/response.go b/typedapi/security/enrollnode/response.go old mode 100755 new mode 100644 index 6a99d5b7eb..0334e28e2d --- a/typedapi/security/enrollnode/response.go +++ b/typedapi/security/enrollnode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package enrollnode // Response holds the response body struct for the package enrollnode // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/enroll_node/Response.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/enroll_node/Response.ts#L20-L29 type Response struct { HttpCaCert string `json:"http_ca_cert"` diff --git a/typedapi/security/getapikey/get_api_key.go b/typedapi/security/getapikey/get_api_key.go old mode 100755 new mode 100644 index 94c07d73b4..7fd6adb70d --- a/typedapi/security/getapikey/get_api_key.go +++ b/typedapi/security/getapikey/get_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information for one or more API keys. package getapikey @@ -160,7 +160,6 @@ func (r GetApiKey) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getapikey/response.go b/typedapi/security/getapikey/response.go old mode 100755 new mode 100644 index ea8af4b2b7..af3b672715 --- a/typedapi/security/getapikey/response.go +++ b/typedapi/security/getapikey/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getapikey @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_api_key/SecurityGetApiKeyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_api_key/SecurityGetApiKeyResponse.ts#L22-L24 type Response struct { ApiKeys []types.ApiKey `json:"api_keys"` diff --git a/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go b/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go old mode 100755 new mode 100644 index fcab3739b2..17f2ce252e --- a/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go +++ b/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves the list of cluster privileges and index privileges that are // available in this version of Elasticsearch. @@ -163,7 +163,6 @@ func (r GetBuiltinPrivileges) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getbuiltinprivileges/response.go b/typedapi/security/getbuiltinprivileges/response.go old mode 100755 new mode 100644 index d0d5904e6d..a651030b91 --- a/typedapi/security/getbuiltinprivileges/response.go +++ b/typedapi/security/getbuiltinprivileges/response.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package getbuiltinprivileges

+import (
+	"bytes"
+	"encoding/json"
+	"errors"
+	"io"
+)
+
 // Response holds the response body struct for the package getbuiltinprivileges
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_builtin_privileges/SecurityGetBuiltinPrivilegesResponse.ts#L22-L24
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_builtin_privileges/SecurityGetBuiltinPrivilegesResponse.ts#L22-L24
 type Response struct {
 	Cluster []string `json:"cluster"`
@@ -34,3 +41,43 @@ func NewResponse() *Response {
 	r := &Response{}
 	return r
 }
+
+func (s *Response) UnmarshalJSON(data []byte) error {
+	dec := json.NewDecoder(bytes.NewReader(data))
+
+	for {
+		t, err := dec.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			return err
+		}
+
+		switch t {
+
+		case "cluster":
+			if err := dec.Decode(&s.Cluster); err != nil {
+				return err
+			}
+
+		case "index":
+			rawMsg := json.RawMessage{}
+			dec.Decode(&rawMsg)
+			if !bytes.HasPrefix(rawMsg, []byte("[")) {
+				o := new(string)
+				if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil {
+					return err
+				}
+
+				s.Index = append(s.Index, *o)
+			} else {
+				if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil {
+					return err
+				}
+			}
+
+		}
+	}
+	return nil
+}
diff --git a/typedapi/security/getprivileges/get_privileges.go b/typedapi/security/getprivileges/get_privileges.go
old mode 100755
new mode 100644
index ed79f5d01b..26cef2e7fe
--- a/typedapi/security/getprivileges/get_privileges.go
+++ b/typedapi/security/getprivileges/get_privileges.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Retrieves application privileges.
 package getprivileges
@@ -191,7 +191,6 @@ func (r GetPrivileges) Do(ctx context.Context) (Response, error) {
 		}
 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/security/getprivileges/response.go b/typedapi/security/getprivileges/response.go
old mode 100755
new mode 100644
index df576f16ed..b9693b6a42
--- a/typedapi/security/getprivileges/response.go
+++ b/typedapi/security/getprivileges/response.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
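The generated UnmarshalJSON added above tolerates the "index" property arriving as either a single JSON string or an array of strings, folding both shapes into the []string field. A self-contained sketch of the same string-or-array tolerance, written against a hypothetical local type rather than the generated package so it compiles and runs on its own:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// privileges is a stand-in for the generated Response type.
type privileges struct {
	Index []string `json:"index"`
}

func (p *privileges) UnmarshalJSON(data []byte) error {
	var raw struct {
		Index json.RawMessage `json:"index"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if len(raw.Index) == 0 {
		return nil
	}
	// A leading '[' means an array; anything else is decoded as one string.
	if bytes.HasPrefix(bytes.TrimSpace(raw.Index), []byte("[")) {
		return json.Unmarshal(raw.Index, &p.Index)
	}
	var single string
	if err := json.Unmarshal(raw.Index, &single); err != nil {
		return err
	}
	p.Index = []string{single}
	return nil
}

func main() {
	for _, doc := range []string{
		`{"index": "monitor"}`,
		`{"index": ["monitor", "read"]}`,
	} {
		var p privileges
		if err := json.Unmarshal([]byte(doc), &p); err != nil {
			panic(err)
		}
		fmt.Println(p.Index) // [monitor], then [monitor read]
	}
}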
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_privileges/SecurityGetPrivilegesResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_privileges/SecurityGetPrivilegesResponse.ts#L23-L25 type Response map[string]map[string]types.PrivilegesActions diff --git a/typedapi/security/getrole/get_role.go b/typedapi/security/getrole/get_role.go old mode 100755 new mode 100644 index 54a6987022..d0760bce16 --- a/typedapi/security/getrole/get_role.go +++ b/typedapi/security/getrole/get_role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves roles in the native realm. package getrole @@ -175,7 +175,6 @@ func (r GetRole) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getrole/response.go b/typedapi/security/getrole/response.go old mode 100755 new mode 100644 index 9dbf8d35eb..6fee13e48c --- a/typedapi/security/getrole/response.go +++ b/typedapi/security/getrole/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrole @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrole // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_role/SecurityGetRoleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_role/SecurityGetRoleResponse.ts#L23-L25 type Response map[string]types.Role diff --git a/typedapi/security/getrolemapping/get_role_mapping.go b/typedapi/security/getrolemapping/get_role_mapping.go old mode 100755 new mode 100644 index 1ef3db105d..e2130d666f --- a/typedapi/security/getrolemapping/get_role_mapping.go +++ b/typedapi/security/getrolemapping/get_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves role mappings. 
package getrolemapping @@ -175,7 +175,6 @@ func (r GetRoleMapping) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getrolemapping/response.go b/typedapi/security/getrolemapping/response.go old mode 100755 new mode 100644 index 2b9b46dfad..f02a9d144e --- a/typedapi/security/getrolemapping/response.go +++ b/typedapi/security/getrolemapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrolemapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_role_mapping/SecurityGetRoleMappingResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_role_mapping/SecurityGetRoleMappingResponse.ts#L23-L25 type Response map[string]types.SecurityRoleMapping diff --git a/typedapi/security/getserviceaccounts/get_service_accounts.go b/typedapi/security/getserviceaccounts/get_service_accounts.go old mode 100755 new mode 100644 index 0a2d52fc4c..d96c3f7f92 --- a/typedapi/security/getserviceaccounts/get_service_accounts.go +++ b/typedapi/security/getserviceaccounts/get_service_accounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about service accounts. package getserviceaccounts @@ -191,7 +191,6 @@ func (r GetServiceAccounts) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getserviceaccounts/response.go b/typedapi/security/getserviceaccounts/response.go old mode 100755 new mode 100644 index 8c2397418b..3faa0df223 --- a/typedapi/security/getserviceaccounts/response.go +++ b/typedapi/security/getserviceaccounts/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getserviceaccounts @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getserviceaccounts // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_service_accounts/GetServiceAccountsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_service_accounts/GetServiceAccountsResponse.ts#L23-L25 type Response map[string]types.RoleDescriptorWrapper diff --git a/typedapi/security/getservicecredentials/get_service_credentials.go b/typedapi/security/getservicecredentials/get_service_credentials.go old mode 100755 new mode 100644 index aaf2a0095e..89f748fda5 --- a/typedapi/security/getservicecredentials/get_service_credentials.go +++ b/typedapi/security/getservicecredentials/get_service_credentials.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information of all service credentials for a service account. package getservicecredentials @@ -180,7 +180,6 @@ func (r GetServiceCredentials) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getservicecredentials/response.go b/typedapi/security/getservicecredentials/response.go old mode 100755 new mode 100644 index b393749d95..0623eb426a --- a/typedapi/security/getservicecredentials/response.go +++ b/typedapi/security/getservicecredentials/response.go @@ -16,32 +16,30 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package getservicecredentials

 import (
-	"encoding/json"
-
 	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
 )

 // Response holds the response body struct for the package getservicecredentials
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_service_credentials/GetServiceCredentialsResponse.ts#L25-L33
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_service_credentials/GetServiceCredentialsResponse.ts#L25-L33
 type Response struct {
 	Count int `json:"count"`
 	// NodesCredentials Contains service account credentials collected from all nodes of the cluster
-	NodesCredentials types.NodesCredentials `json:"nodes_credentials"`
-	ServiceAccount string `json:"service_account"`
-	Tokens map[string]map[string]json.RawMessage `json:"tokens"`
+	NodesCredentials types.NodesCredentials `json:"nodes_credentials"`
+	ServiceAccount string `json:"service_account"`
+	Tokens map[string]types.Metadata `json:"tokens"`
 }

 // NewResponse returns a Response
 func NewResponse() *Response {
 	r := &Response{
-		Tokens: make(map[string]map[string]json.RawMessage, 0),
+		Tokens: make(map[string]types.Metadata, 0),
 	}
 	return r
 }
diff --git a/typedapi/security/gettoken/get_token.go b/typedapi/security/gettoken/get_token.go
old mode 100755
new mode 100644
index da772a2e7d..bb6d1ed7c9
--- a/typedapi/security/gettoken/get_token.go
+++ b/typedapi/security/gettoken/get_token.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Creates a bearer token for access without requiring basic authentication.
 package gettoken
@@ -196,7 +196,6 @@ func (r GetToken) Do(ctx context.Context) (*Response, error) {
 		}
 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/security/gettoken/request.go b/typedapi/security/gettoken/request.go
old mode 100755
new mode 100644
index 8234926537..2102e24f45
--- a/typedapi/security/gettoken/request.go
+++ b/typedapi/security/gettoken/request.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
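With the change above, the get_service_credentials response keys Tokens by token name with a types.Metadata value per token, and NewResponse pre-allocates the map. A small sketch of populating and ranging over the retyped field, assuming types.Metadata behaves like a string-keyed map; the service account and token name are invented for illustration:

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/getservicecredentials"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	resp := getservicecredentials.NewResponse()
	resp.ServiceAccount = "elastic/fleet-server" // hypothetical service account
	resp.Count = 1
	resp.Tokens["token1"] = types.Metadata{} // each token name maps to its (possibly empty) metadata

	for name, meta := range resp.Tokens {
		fmt.Printf("token %q carries %d metadata keys\n", name, len(meta))
	}
}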
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettoken @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package gettoken // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_token/GetUserAccessTokenRequest.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_token/GetUserAccessTokenRequest.ts#L25-L39 type Request struct { GrantType *accesstokengranttype.AccessTokenGrantType `json:"grant_type,omitempty"` KerberosTicket *string `json:"kerberos_ticket,omitempty"` diff --git a/typedapi/security/gettoken/response.go b/typedapi/security/gettoken/response.go old mode 100755 new mode 100644 index 75e7fe0692..5b15895810 --- a/typedapi/security/gettoken/response.go +++ b/typedapi/security/gettoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettoken // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_token/GetUserAccessTokenResponse.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_token/GetUserAccessTokenResponse.ts#L23-L33 type Response struct { AccessToken string `json:"access_token"` diff --git a/typedapi/security/getuser/get_user.go b/typedapi/security/getuser/get_user.go old mode 100755 new mode 100644 index 4184d759d3..76e976982c --- a/typedapi/security/getuser/get_user.go +++ b/typedapi/security/getuser/get_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about users in the native realm and built-in users. package getuser @@ -176,7 +176,6 @@ func (r GetUser) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getuser/response.go b/typedapi/security/getuser/response.go old mode 100755 new mode 100644 index 8140b5cef2..9d3c2dae37 --- a/typedapi/security/getuser/response.go +++ b/typedapi/security/getuser/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getuser @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuser // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_user/SecurityGetUserResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_user/SecurityGetUserResponse.ts#L23-L25 type Response map[string]types.User diff --git a/typedapi/security/getuserprivileges/get_user_privileges.go b/typedapi/security/getuserprivileges/get_user_privileges.go old mode 100755 new mode 100644 index 29703a3406..370a0711b6 --- a/typedapi/security/getuserprivileges/get_user_privileges.go +++ b/typedapi/security/getuserprivileges/get_user_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves security privileges for the logged in user. package getuserprivileges @@ -161,7 +161,6 @@ func (r GetUserPrivileges) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getuserprivileges/response.go b/typedapi/security/getuserprivileges/response.go old mode 100755 new mode 100644 index 32697cb859..af336d425b --- a/typedapi/security/getuserprivileges/response.go +++ b/typedapi/security/getuserprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getuserprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuserprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_user_privileges/SecurityGetUserPrivilegesResponse.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_user_privileges/SecurityGetUserPrivilegesResponse.ts#L27-L35 type Response struct { Applications []types.ApplicationPrivileges `json:"applications"` diff --git a/typedapi/security/getuserprofile/get_user_profile.go b/typedapi/security/getuserprofile/get_user_profile.go old mode 100755 new mode 100644 index 40bf0318c3..910576d3ab --- a/typedapi/security/getuserprofile/get_user_profile.go +++ b/typedapi/security/getuserprofile/get_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves user profiles for the given unique ID(s). 
package getuserprofile @@ -170,7 +170,6 @@ func (r GetUserProfile) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/getuserprofile/response.go b/typedapi/security/getuserprofile/response.go old mode 100755 new mode 100644 index d71fcc1db9..26984e2d8a --- a/typedapi/security/getuserprofile/response.go +++ b/typedapi/security/getuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_user_profile/Response.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_user_profile/Response.ts#L23-L28 type Response struct { Errors *types.GetUserProfileErrors `json:"errors,omitempty"` diff --git a/typedapi/security/grantapikey/grant_api_key.go b/typedapi/security/grantapikey/grant_api_key.go old mode 100755 new mode 100644 index 7efc4c06be..04aa42ebd0 --- a/typedapi/security/grantapikey/grant_api_key.go +++ b/typedapi/security/grantapikey/grant_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates an API key on behalf of another user. package grantapikey @@ -196,7 +196,6 @@ func (r GrantApiKey) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/grantapikey/request.go b/typedapi/security/grantapikey/request.go old mode 100755 new mode 100644 index 3b72f98f07..1fc250ea66 --- a/typedapi/security/grantapikey/request.go +++ b/typedapi/security/grantapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package grantapikey @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package grantapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/grant_api_key/SecurityGrantApiKeyRequest.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/grant_api_key/SecurityGrantApiKeyRequest.ts#L24-L38 type Request struct { AccessToken *string `json:"access_token,omitempty"` ApiKey types.GrantApiKey `json:"api_key"` diff --git a/typedapi/security/grantapikey/response.go b/typedapi/security/grantapikey/response.go old mode 100755 new mode 100644 index ec8749e61b..5b5e4ec4fb --- a/typedapi/security/grantapikey/response.go +++ b/typedapi/security/grantapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package grantapikey // Response holds the response body struct for the package grantapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/grant_api_key/SecurityGrantApiKeyResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/grant_api_key/SecurityGrantApiKeyResponse.ts#L23-L31 type Response struct { ApiKey string `json:"api_key"` diff --git a/typedapi/security/hasprivileges/has_privileges.go b/typedapi/security/hasprivileges/has_privileges.go old mode 100755 new mode 100644 index b8709ea05a..07d6c93ace --- a/typedapi/security/hasprivileges/has_privileges.go +++ b/typedapi/security/hasprivileges/has_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Determines whether the specified user has a specified list of privileges. package hasprivileges @@ -214,7 +214,6 @@ func (r HasPrivileges) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/hasprivileges/request.go b/typedapi/security/hasprivileges/request.go old mode 100755 new mode 100644 index dad678b71a..7827d1193f --- a/typedapi/security/hasprivileges/request.go +++ b/typedapi/security/hasprivileges/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package hasprivileges @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package hasprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/SecurityHasPrivilegesRequest.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/SecurityHasPrivilegesRequest.ts#L25-L42 type Request struct { Application []types.ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. diff --git a/typedapi/security/hasprivileges/response.go b/typedapi/security/hasprivileges/response.go old mode 100755 new mode 100644 index 22e3064ca9..dc8793ed67 --- a/typedapi/security/hasprivileges/response.go +++ b/typedapi/security/hasprivileges/response.go @@ -16,27 +16,31 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package hasprivileges +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + // Response holds the response body struct for the package hasprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/SecurityHasPrivilegesResponse.ts#L24-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/SecurityHasPrivilegesResponse.ts#L24-L32 type Response struct { - Application map[string]map[string]map[string]bool `json:"application"` - Cluster map[string]bool `json:"cluster"` - HasAllRequested bool `json:"has_all_requested"` - Index map[string]map[string]bool `json:"index"` - Username string `json:"username"` + Application types.ApplicationsPrivileges `json:"application"` + Cluster map[string]bool `json:"cluster"` + HasAllRequested bool `json:"has_all_requested"` + Index map[string]types.Privileges `json:"index"` + Username string `json:"username"` } // NewResponse returns a Response func NewResponse() *Response { r := &Response{ Cluster: make(map[string]bool, 0), - Index: make(map[string]map[string]bool, 0), + Index: make(map[string]types.Privileges, 0), } return r } diff --git a/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go b/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go old mode 100755 new mode 100644 index 491e7de4ba..c82d1140ed --- a/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go +++ b/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Determines whether the users associated with the specified profile IDs have // all the requested privileges. 
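The retyped has_privileges response is consumed the same way as before; the inline nested maps have simply been replaced by the shared privilege aliases from the types package. A minimal decoding sketch, assuming those aliases unmarshal from plain JSON objects (the body, user, and privilege names below are made up for illustration):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/hasprivileges"
)

func main() {
	// Hypothetical response body; the field names match the generated
	// Response struct shown in the diff above.
	body := []byte(`{
		"username": "jane",
		"has_all_requested": false,
		"cluster": {"monitor": true, "manage": false},
		"index": {"logs-*": {"read": true, "write": false}},
		"application": {}
	}`)

	resp := hasprivileges.NewResponse()
	if err := json.Unmarshal(body, resp); err != nil {
		panic(err)
	}

	// Cluster is still a plain map[string]bool; Index values are now the
	// shared types.Privileges alias instead of an inline nested map.
	fmt.Println("has_all_requested:", resp.HasAllRequested)
	for privilege, granted := range resp.Cluster {
		fmt.Printf("cluster %q: %v\n", privilege, granted)
	}
	for index, privileges := range resp.Index {
		fmt.Printf("index %q: %v\n", index, privileges)
	}
}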
@@ -198,7 +198,6 @@ func (r HasPrivilegesUserProfile) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/hasprivilegesuserprofile/request.go b/typedapi/security/hasprivilegesuserprofile/request.go old mode 100755 new mode 100644 index a8ff8d1566..1c2b35ec28 --- a/typedapi/security/hasprivilegesuserprofile/request.go +++ b/typedapi/security/hasprivilegesuserprofile/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package hasprivilegesuserprofile @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package hasprivilegesuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges_user_profile/Request.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges_user_profile/Request.ts#L24-L38 type Request struct { Privileges types.PrivilegesCheck `json:"privileges"` // Uids A list of profile IDs. The privileges are checked for associated users of the diff --git a/typedapi/security/hasprivilegesuserprofile/response.go b/typedapi/security/hasprivilegesuserprofile/response.go old mode 100755 new mode 100644 index 523ec0cf5c..8494aa050d --- a/typedapi/security/hasprivilegesuserprofile/response.go +++ b/typedapi/security/hasprivilegesuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package hasprivilegesuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hasprivilegesuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges_user_profile/Response.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges_user_profile/Response.ts#L23-L38 type Response struct { diff --git a/typedapi/security/invalidateapikey/invalidate_api_key.go b/typedapi/security/invalidateapikey/invalidate_api_key.go old mode 100755 new mode 100644 index 49c444bafc..5eecd6289d --- a/typedapi/security/invalidateapikey/invalidate_api_key.go +++ b/typedapi/security/invalidateapikey/invalidate_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Invalidates one or more API keys. 
package invalidateapikey @@ -194,7 +194,6 @@ func (r InvalidateApiKey) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/invalidateapikey/request.go b/typedapi/security/invalidateapikey/request.go old mode 100755 new mode 100644 index c8650985cd..23c71cae18 --- a/typedapi/security/invalidateapikey/request.go +++ b/typedapi/security/invalidateapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package invalidateapikey @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package invalidateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/invalidate_api_key/SecurityInvalidateApiKeyRequest.ts#L23-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/invalidate_api_key/SecurityInvalidateApiKeyRequest.ts#L23-L37 type Request struct { Id *string `json:"id,omitempty"` Ids []string `json:"ids,omitempty"` diff --git a/typedapi/security/invalidateapikey/response.go b/typedapi/security/invalidateapikey/response.go old mode 100755 new mode 100644 index 4d2445d1ea..377d7749f8 --- a/typedapi/security/invalidateapikey/response.go +++ b/typedapi/security/invalidateapikey/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package invalidateapikey @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package invalidateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/invalidate_api_key/SecurityInvalidateApiKeyResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/invalidate_api_key/SecurityInvalidateApiKeyResponse.ts#L23-L30 type Response struct { ErrorCount int `json:"error_count"` diff --git a/typedapi/security/invalidatetoken/invalidate_token.go b/typedapi/security/invalidatetoken/invalidate_token.go old mode 100755 new mode 100644 index 3b03f9faa6..58b9056ede --- a/typedapi/security/invalidatetoken/invalidate_token.go +++ b/typedapi/security/invalidatetoken/invalidate_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Invalidates one or more access tokens or refresh tokens. 
package invalidatetoken @@ -196,7 +196,6 @@ func (r InvalidateToken) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/invalidatetoken/request.go b/typedapi/security/invalidatetoken/request.go old mode 100755 new mode 100644 index 3c48468de2..1dde8ca14a --- a/typedapi/security/invalidatetoken/request.go +++ b/typedapi/security/invalidatetoken/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package invalidatetoken @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package invalidatetoken // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/invalidate_token/SecurityInvalidateTokenRequest.ts#L23-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/invalidate_token/SecurityInvalidateTokenRequest.ts#L23-L35 type Request struct { RealmName *string `json:"realm_name,omitempty"` RefreshToken *string `json:"refresh_token,omitempty"` diff --git a/typedapi/security/invalidatetoken/response.go b/typedapi/security/invalidatetoken/response.go old mode 100755 new mode 100644 index ce7eca515e..3732e451fc --- a/typedapi/security/invalidatetoken/response.go +++ b/typedapi/security/invalidatetoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package invalidatetoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package invalidatetoken // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/invalidate_token/SecurityInvalidateTokenResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/invalidate_token/SecurityInvalidateTokenResponse.ts#L23-L30 type Response struct { ErrorCount int64 `json:"error_count"` diff --git a/typedapi/security/oidcauthenticate/oidc_authenticate.go b/typedapi/security/oidcauthenticate/oidc_authenticate.go old mode 100755 new mode 100644 index ef669fa716..a2287ffa31 --- a/typedapi/security/oidcauthenticate/oidc_authenticate.go +++ b/typedapi/security/oidcauthenticate/oidc_authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Exchanges an OpenID Connection authentication response message for an // Elasticsearch access token and refresh token pair diff --git a/typedapi/security/oidclogout/oidc_logout.go b/typedapi/security/oidclogout/oidc_logout.go old mode 100755 new mode 100644 index aec9d3b365..b1de641f09 --- a/typedapi/security/oidclogout/oidc_logout.go +++ b/typedapi/security/oidclogout/oidc_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Invalidates a refresh token and access token that was generated from the // OpenID Connect Authenticate API diff --git a/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go b/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go old mode 100755 new mode 100644 index 9e6c3e12e7..34900fcb7d --- a/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go +++ b/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates an OAuth 2.0 authentication request as a URL string package oidcprepareauthentication diff --git a/typedapi/security/putprivileges/put_privileges.go b/typedapi/security/putprivileges/put_privileges.go old mode 100755 new mode 100644 index d1020e036d..82ec0190ca --- a/typedapi/security/putprivileges/put_privileges.go +++ b/typedapi/security/putprivileges/put_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds or updates application privileges. package putprivileges @@ -127,7 +127,7 @@ func (r *PutPrivileges) HttpRequest(ctx context.Context) (*http.Request, error) path.WriteString("_security") path.WriteString("/") path.WriteString("privilege") - path.WriteString("/") + method = http.MethodPut } @@ -196,7 +196,6 @@ func (r PutPrivileges) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/putprivileges/response.go b/typedapi/security/putprivileges/response.go old mode 100755 new mode 100644 index 40d9088f59..31652d43d7 --- a/typedapi/security/putprivileges/response.go +++ b/typedapi/security/putprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_privileges/SecurityPutPrivilegesResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_privileges/SecurityPutPrivilegesResponse.ts#L23-L25 type Response map[string]map[string]types.CreatedStatus diff --git a/typedapi/security/putrole/put_role.go b/typedapi/security/putrole/put_role.go old mode 100755 new mode 100644 index a4a6a213c0..0f2fb927ad --- a/typedapi/security/putrole/put_role.go +++ b/typedapi/security/putrole/put_role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds and updates roles in the native realm. package putrole @@ -207,7 +207,6 @@ func (r PutRole) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/putrole/request.go b/typedapi/security/putrole/request.go old mode 100755 new mode 100644 index be792d474f..7840823f92 --- a/typedapi/security/putrole/request.go +++ b/typedapi/security/putrole/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putrole @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package putrole // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_role/SecurityPutRoleRequest.ts#L31-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_role/SecurityPutRoleRequest.ts#L31-L80 type Request struct { // Applications A list of application privilege entries. @@ -46,7 +46,7 @@ type Request struct { Indices []types.IndicesPrivileges `json:"indices,omitempty"` // Metadata Optional metadata. Within the metadata object, keys that begin with an // underscore (`_`) are reserved for system use. - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata types.Metadata `json:"metadata,omitempty"` // RunAs A list of users that the owners of this role can impersonate. RunAs []string `json:"run_as,omitempty"` // TransientMetadata Indicates roles that might be incompatible with the current cluster license, diff --git a/typedapi/security/putrole/response.go b/typedapi/security/putrole/response.go old mode 100755 new mode 100644 index 6beff05107..fd5e4918c6 --- a/typedapi/security/putrole/response.go +++ b/typedapi/security/putrole/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putrole @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putrole // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_role/SecurityPutRoleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_role/SecurityPutRoleResponse.ts#L22-L24 type Response struct { Role types.CreatedStatus `json:"role"` diff --git a/typedapi/security/putrolemapping/put_role_mapping.go b/typedapi/security/putrolemapping/put_role_mapping.go old mode 100755 new mode 100644 index b1378bb610..17acbc6b55 --- a/typedapi/security/putrolemapping/put_role_mapping.go +++ b/typedapi/security/putrolemapping/put_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates and updates role mappings. package putrolemapping @@ -207,7 +207,6 @@ func (r PutRoleMapping) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/putrolemapping/request.go b/typedapi/security/putrolemapping/request.go old mode 100755 new mode 100644 index 61c4abf1f5..f04800e21a --- a/typedapi/security/putrolemapping/request.go +++ b/typedapi/security/putrolemapping/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putrolemapping @@ -29,13 +29,13 @@ import ( // Request holds the request body struct for the package putrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_role_mapping/SecurityPutRoleMappingRequest.ts#L24-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_role_mapping/SecurityPutRoleMappingRequest.ts#L24-L43 type Request struct { - Enabled *bool `json:"enabled,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - Roles []string `json:"roles,omitempty"` - Rules *types.RoleMappingRule `json:"rules,omitempty"` - RunAs []string `json:"run_as,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Metadata types.Metadata `json:"metadata,omitempty"` + Roles []string `json:"roles,omitempty"` + Rules *types.RoleMappingRule `json:"rules,omitempty"` + RunAs []string `json:"run_as,omitempty"` } // NewRequest returns a Request diff --git a/typedapi/security/putrolemapping/response.go b/typedapi/security/putrolemapping/response.go old mode 100755 new mode 100644 index 2bc723d4e8..c89f7e4df7 --- a/typedapi/security/putrolemapping/response.go +++ b/typedapi/security/putrolemapping/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putrolemapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_role_mapping/SecurityPutRoleMappingResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_role_mapping/SecurityPutRoleMappingResponse.ts#L22-L24 type Response struct { Created *bool `json:"created,omitempty"` diff --git a/typedapi/security/putuser/put_user.go b/typedapi/security/putuser/put_user.go old mode 100755 new mode 100644 index 191eea2c8c..bd14ddaa5a --- a/typedapi/security/putuser/put_user.go +++ b/typedapi/security/putuser/put_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds and updates users in the native realm. These users are commonly referred // to as native users. @@ -209,7 +209,6 @@ func (r PutUser) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/putuser/request.go b/typedapi/security/putuser/request.go old mode 100755 new mode 100644 index 01846b8597..8bb38b4c6f --- a/typedapi/security/putuser/request.go +++ b/typedapi/security/putuser/request.go @@ -16,27 +16,29 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putuser import ( "encoding/json" "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Request holds the request body struct for the package putuser // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_user/SecurityPutUserRequest.ts#L23-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_user/SecurityPutUserRequest.ts#L23-L45 type Request struct { - Email string `json:"email,omitempty"` - Enabled *bool `json:"enabled,omitempty"` - FullName string `json:"full_name,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - Password *string `json:"password,omitempty"` - PasswordHash *string `json:"password_hash,omitempty"` - Roles []string `json:"roles,omitempty"` - Username *string `json:"username,omitempty"` + Email string `json:"email,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + FullName string `json:"full_name,omitempty"` + Metadata types.Metadata `json:"metadata,omitempty"` + Password *string `json:"password,omitempty"` + PasswordHash *string `json:"password_hash,omitempty"` + Roles []string `json:"roles,omitempty"` + Username *string `json:"username,omitempty"` } // NewRequest returns a Request diff --git a/typedapi/security/putuser/response.go b/typedapi/security/putuser/response.go old mode 100755 new mode 100644 index 6060c8a3a1..db1f8fdb53 --- a/typedapi/security/putuser/response.go +++ b/typedapi/security/putuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putuser // Response holds the response body struct for the package putuser // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_user/SecurityPutUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_user/SecurityPutUserResponse.ts#L20-L22 type Response struct { Created bool `json:"created"` diff --git a/typedapi/security/queryapikeys/query_api_keys.go b/typedapi/security/queryapikeys/query_api_keys.go old mode 100755 new mode 100644 index e42d562434..90f6c45335 --- a/typedapi/security/queryapikeys/query_api_keys.go +++ b/typedapi/security/queryapikeys/query_api_keys.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
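Several security request bodies in this diff (put_role, put_role_mapping, put_user, update_api_key) now use the shared types.Metadata alias in place of map[string]json.RawMessage, so callers that built metadata maps by hand need a small adjustment. A sketch of building a put_user body under that assumption; the user details are placeholders, and populating Metadata from a JSON object keeps the sketch independent of the alias's concrete definition:

package main

import (
	"encoding/json"
	"os"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/putuser"
)

func main() {
	enabled := true
	password := "change-me" // placeholder value for illustration only

	req := putuser.Request{
		Email:    "jane@example.com",
		FullName: "Jane Doe",
		Enabled:  &enabled,
		Password: &password,
		Roles:    []string{"monitoring_user"},
	}

	// Metadata is now the shared types.Metadata alias rather than
	// map[string]json.RawMessage; unmarshalling an object into it works
	// for any map-like alias without naming its element type here.
	if err := json.Unmarshal([]byte(`{"department":"ops"}`), &req.Metadata); err != nil {
		panic(err)
	}

	// Print the body that would be sent to the put_user endpoint.
	if err := json.NewEncoder(os.Stdout).Encode(req); err != nil {
		panic(err)
	}
}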
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information for API keys using a subset of query DSL package queryapikeys @@ -197,7 +197,6 @@ func (r QueryApiKeys) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/queryapikeys/request.go b/typedapi/security/queryapikeys/request.go old mode 100755 new mode 100644 index 349cf610f0..f3bb0d7ef6 --- a/typedapi/security/queryapikeys/request.go +++ b/typedapi/security/queryapikeys/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package queryapikeys @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package queryapikeys // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/query_api_keys/QueryApiKeysRequest.ts#L25-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/query_api_keys/QueryApiKeysRequest.ts#L25-L67 type Request struct { // From Starting document offset. By default, you cannot page through more than diff --git a/typedapi/security/queryapikeys/response.go b/typedapi/security/queryapikeys/response.go old mode 100755 new mode 100644 index dc8775f624..d1b2f8eeb9 --- a/typedapi/security/queryapikeys/response.go +++ b/typedapi/security/queryapikeys/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package queryapikeys @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package queryapikeys // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/query_api_keys/QueryApiKeysResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/query_api_keys/QueryApiKeysResponse.ts#L23-L29 type Response struct { ApiKeys []types.ApiKey `json:"api_keys"` diff --git a/typedapi/security/samlauthenticate/request.go b/typedapi/security/samlauthenticate/request.go old mode 100755 new mode 100644 index 49a711b3df..d7f95f5e01 --- a/typedapi/security/samlauthenticate/request.go +++ b/typedapi/security/samlauthenticate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlauthenticate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlauthenticate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_authenticate/Request.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_authenticate/Request.ts#L23-L38 type Request struct { // Content The SAML response as it was sent by the user’s browser, usually a Base64 diff --git a/typedapi/security/samlauthenticate/response.go b/typedapi/security/samlauthenticate/response.go old mode 100755 new mode 100644 index 4a3e551491..1f0fd9f686 --- a/typedapi/security/samlauthenticate/response.go +++ b/typedapi/security/samlauthenticate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlauthenticate // Response holds the response body struct for the package samlauthenticate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_authenticate/Response.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_authenticate/Response.ts#L22-L30 type Response struct { AccessToken string `json:"access_token"` diff --git a/typedapi/security/samlauthenticate/saml_authenticate.go b/typedapi/security/samlauthenticate/saml_authenticate.go old mode 100755 new mode 100644 index d780a7969f..8c77fa9bc7 --- a/typedapi/security/samlauthenticate/saml_authenticate.go +++ b/typedapi/security/samlauthenticate/saml_authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Exchanges a SAML Response message for an Elasticsearch access token and // refresh token pair @@ -198,7 +198,6 @@ func (r SamlAuthenticate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/samlcompletelogout/request.go b/typedapi/security/samlcompletelogout/request.go old mode 100755 new mode 100644 index ef1ce96c5b..6026e48a0c --- a/typedapi/security/samlcompletelogout/request.go +++ b/typedapi/security/samlcompletelogout/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlcompletelogout @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlcompletelogout // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_complete_logout/Request.ts#L23-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_complete_logout/Request.ts#L23-L40 type Request struct { // Content If the SAML IdP sends the logout response with the HTTP-Post binding, this diff --git a/typedapi/security/samlcompletelogout/response.go b/typedapi/security/samlcompletelogout/response.go deleted file mode 100755 index 9fad33598b..0000000000 --- a/typedapi/security/samlcompletelogout/response.go +++ /dev/null @@ -1,34 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - -package samlcompletelogout - -// Response holds the response body struct for the package samlcompletelogout -// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_complete_logout/Response.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} diff --git a/typedapi/security/samlcompletelogout/saml_complete_logout.go b/typedapi/security/samlcompletelogout/saml_complete_logout.go old mode 100755 new mode 100644 index 73f0e835cd..5e2da62835 --- a/typedapi/security/samlcompletelogout/saml_complete_logout.go +++ b/typedapi/security/samlcompletelogout/saml_complete_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Verifies the logout response sent from the SAML IdP package samlcompletelogout @@ -33,7 +33,6 @@ import ( "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // ErrBuildPath is returned in case of missing parameters within the build of the request. 
@@ -178,36 +177,6 @@ func (r SamlCompleteLogout) Perform(ctx context.Context) (*http.Response, error) return res, nil } -// Do runs the request through the transport, handle the response and returns a samlcompletelogout.Response -func (r SamlCompleteLogout) Do(ctx context.Context) (*Response, error) { - - response := NewResponse() - - res, err := r.Perform(ctx) - if err != nil { - return nil, err - } - defer res.Body.Close() - - if res.StatusCode < 299 { - err = json.NewDecoder(res.Body).Decode(response) - if err != nil { - return nil, err - } - - return response, nil - - } - - errorResponse := types.NewElasticsearchError() - err = json.NewDecoder(res.Body).Decode(errorResponse) - if err != nil { - return nil, err - } - - return nil, errorResponse -} - // Header set a key, value pair in the SamlCompleteLogout headers map. func (r *SamlCompleteLogout) Header(key, value string) *SamlCompleteLogout { r.headers.Set(key, value) diff --git a/typedapi/security/samlinvalidate/request.go b/typedapi/security/samlinvalidate/request.go old mode 100755 new mode 100644 index 95afd465fa..01a275c5c7 --- a/typedapi/security/samlinvalidate/request.go +++ b/typedapi/security/samlinvalidate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlinvalidate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlinvalidate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_invalidate/Request.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_invalidate/Request.ts#L22-L43 type Request struct { // Acs The Assertion Consumer Service URL that matches the one of the SAML realm in diff --git a/typedapi/security/samlinvalidate/response.go b/typedapi/security/samlinvalidate/response.go old mode 100755 new mode 100644 index 74f40180a6..00fe7c9fb0 --- a/typedapi/security/samlinvalidate/response.go +++ b/typedapi/security/samlinvalidate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlinvalidate // Response holds the response body struct for the package samlinvalidate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_invalidate/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_invalidate/Response.ts#L22-L28 type Response struct { Invalidated int `json:"invalidated"` diff --git a/typedapi/security/samlinvalidate/saml_invalidate.go b/typedapi/security/samlinvalidate/saml_invalidate.go old mode 100755 new mode 100644 index ea7b36b6db..50022689d2 --- a/typedapi/security/samlinvalidate/saml_invalidate.go +++ b/typedapi/security/samlinvalidate/saml_invalidate.go @@ -16,7 +16,7 @@ // under the License. 
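With the empty samlcompletelogout Response type deleted, this endpoint no longer exposes a Do helper; Perform is the remaining entry point and hands back the raw *http.Response. A minimal sketch of the resulting calling pattern; the wrapper name and error wording are illustrative, and how the request value is obtained (normally via the typed client's Security namespace) is outside this sketch:

package samlutil

import (
	"context"
	"fmt"
	"io"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/samlcompletelogout"
)

// completeLogout runs saml_complete_logout through Perform and treats any
// non-2xx status as an error, since there is no generated Do helper to
// decode a response body for this endpoint anymore.
func completeLogout(ctx context.Context, r *samlcompletelogout.SamlCompleteLogout) error {
	res, err := r.Perform(ctx)
	if err != nil {
		return err
	}
	defer res.Body.Close()

	if res.StatusCode >= 300 {
		body, _ := io.ReadAll(res.Body)
		return fmt.Errorf("saml_complete_logout failed: %s: %s", res.Status, body)
	}
	return nil
}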
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Consumes a SAML LogoutRequest package samlinvalidate @@ -196,7 +196,6 @@ func (r SamlInvalidate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/samllogout/request.go b/typedapi/security/samllogout/request.go old mode 100755 new mode 100644 index 398f53fdf2..4f2dbbb07a --- a/typedapi/security/samllogout/request.go +++ b/typedapi/security/samllogout/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samllogout @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samllogout // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_logout/Request.ts#L22-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_logout/Request.ts#L22-L41 type Request struct { // RefreshToken The refresh token that was returned as a response to calling the SAML diff --git a/typedapi/security/samllogout/response.go b/typedapi/security/samllogout/response.go old mode 100755 new mode 100644 index 8b21ac5a47..5ae36e48cb --- a/typedapi/security/samllogout/response.go +++ b/typedapi/security/samllogout/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samllogout // Response holds the response body struct for the package samllogout // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_logout/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_logout/Response.ts#L20-L24 type Response struct { Redirect string `json:"redirect"` diff --git a/typedapi/security/samllogout/saml_logout.go b/typedapi/security/samllogout/saml_logout.go old mode 100755 new mode 100644 index c6a54de701..8ef8020bb2 --- a/typedapi/security/samllogout/saml_logout.go +++ b/typedapi/security/samllogout/saml_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Invalidates an access token and a refresh token that were generated via the // SAML Authenticate API @@ -198,7 +198,6 @@ func (r SamlLogout) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/samlprepareauthentication/request.go b/typedapi/security/samlprepareauthentication/request.go old mode 100755 new mode 100644 index 701574cd06..71dfeea695 --- a/typedapi/security/samlprepareauthentication/request.go +++ b/typedapi/security/samlprepareauthentication/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlprepareauthentication @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlprepareauthentication // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_prepare_authentication/Request.ts#L22-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_prepare_authentication/Request.ts#L22-L46 type Request struct { // Acs The Assertion Consumer Service URL that matches the one of the SAML realms in diff --git a/typedapi/security/samlprepareauthentication/response.go b/typedapi/security/samlprepareauthentication/response.go old mode 100755 new mode 100644 index 3f33ff7fef..fe9c2e9d14 --- a/typedapi/security/samlprepareauthentication/response.go +++ b/typedapi/security/samlprepareauthentication/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlprepareauthentication // Response holds the response body struct for the package samlprepareauthentication // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_prepare_authentication/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_prepare_authentication/Response.ts#L22-L28 type Response struct { Id string `json:"id"` diff --git a/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go b/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go old mode 100755 new mode 100644 index 536a035428..4a232002da --- a/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go +++ b/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a SAML authentication request package samlprepareauthentication @@ -196,7 +196,6 @@ func (r SamlPrepareAuthentication) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/samlserviceprovidermetadata/response.go b/typedapi/security/samlserviceprovidermetadata/response.go old mode 100755 new mode 100644 index 8066866bbe..e7ab176d25 --- a/typedapi/security/samlserviceprovidermetadata/response.go +++ b/typedapi/security/samlserviceprovidermetadata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package samlserviceprovidermetadata // Response holds the response body struct for the package samlserviceprovidermetadata // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/saml_service_provider_metadata/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/saml_service_provider_metadata/Response.ts#L20-L24 type Response struct { Metadata string `json:"metadata"` diff --git a/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go b/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go old mode 100755 new mode 100644 index 4b2541e4e8..df4337faaf --- a/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go +++ b/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider package samlserviceprovidermetadata @@ -178,7 +178,6 @@ func (r SamlServiceProviderMetadata) Do(ctx context.Context) (*Response, error) } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/suggestuserprofiles/request.go b/typedapi/security/suggestuserprofiles/request.go old mode 100755 new mode 100644 index fe722a3060..c80cb98455 --- a/typedapi/security/suggestuserprofiles/request.go +++ b/typedapi/security/suggestuserprofiles/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package suggestuserprofiles @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package suggestuserprofiles // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/suggest_user_profiles/Request.ts#L24-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/suggest_user_profiles/Request.ts#L24-L66 type Request struct { // Data List of filters for the `data` field of the profile document. diff --git a/typedapi/security/suggestuserprofiles/response.go b/typedapi/security/suggestuserprofiles/response.go old mode 100755 new mode 100644 index c5a0205638..6b7bf43cff --- a/typedapi/security/suggestuserprofiles/response.go +++ b/typedapi/security/suggestuserprofiles/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package suggestuserprofiles @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package suggestuserprofiles // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/suggest_user_profiles/Response.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/suggest_user_profiles/Response.ts#L29-L35 type Response struct { Profiles []types.UserProfile `json:"profiles"` diff --git a/typedapi/security/suggestuserprofiles/suggest_user_profiles.go b/typedapi/security/suggestuserprofiles/suggest_user_profiles.go old mode 100755 new mode 100644 index b9b731cfb4..d994037f2c --- a/typedapi/security/suggestuserprofiles/suggest_user_profiles.go +++ b/typedapi/security/suggestuserprofiles/suggest_user_profiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Get suggestions for user profiles that match specified search criteria. package suggestuserprofiles @@ -196,7 +196,6 @@ func (r SuggestUserProfiles) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/updateapikey/request.go b/typedapi/security/updateapikey/request.go old mode 100755 new mode 100644 index 46477e4d5d..c6785eeafb --- a/typedapi/security/updateapikey/request.go +++ b/typedapi/security/updateapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updateapikey @@ -29,13 +29,13 @@ import ( // Request holds the request body struct for the package updateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/update_api_key/Request.ts#L25-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/update_api_key/Request.ts#L25-L49 type Request struct { // Metadata Arbitrary metadata that you want to associate with the API key. It supports // nested data structure. Within the metadata object, keys beginning with _ are // reserved for system usage. - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata types.Metadata `json:"metadata,omitempty"` // RoleDescriptors An array of role descriptors for this API key. This parameter is optional. // When it is not specified or is an empty array, then the API key will have a // point in time snapshot of permissions of the authenticated user. If you diff --git a/typedapi/security/updateapikey/response.go b/typedapi/security/updateapikey/response.go old mode 100755 new mode 100644 index 94d7caa8cc..3014c68d8a --- a/typedapi/security/updateapikey/response.go +++ b/typedapi/security/updateapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updateapikey // Response holds the response body struct for the package updateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/update_api_key/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/update_api_key/Response.ts#L20-L24 type Response struct { Updated bool `json:"updated"` diff --git a/typedapi/security/updateapikey/update_api_key.go b/typedapi/security/updateapikey/update_api_key.go old mode 100755 new mode 100644 index 4c3018b051..b3c7b17ede --- a/typedapi/security/updateapikey/update_api_key.go +++ b/typedapi/security/updateapikey/update_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates attributes of an existing API key. package updateapikey @@ -205,7 +205,6 @@ func (r UpdateApiKey) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/security/updateuserprofiledata/request.go b/typedapi/security/updateuserprofiledata/request.go old mode 100755 new mode 100644 index 2609db1abb..e2417a39df --- a/typedapi/security/updateuserprofiledata/request.go +++ b/typedapi/security/updateuserprofiledata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
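The update_api_key Request above now types its Metadata as types.Metadata instead of a raw map[string]json.RawMessage. A minimal sketch of building the request body, under the assumption that types.Metadata remains a string-keyed map of raw JSON values (key names are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/security/updateapikey"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	req := updateapikey.Request{
		// Assumption: types.Metadata is a map keyed by string. Keys starting with
		// "_" stay reserved for system usage, as the generated doc comment notes.
		Metadata: types.Metadata{
			"environment": json.RawMessage(`"production"`),
		},
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // e.g. {"metadata":{"environment":"production"}}
}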
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updateuserprofiledata @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updateuserprofiledata // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/update_user_profile_data/Request.ts#L27-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/update_user_profile_data/Request.ts#L27-L70 type Request struct { // Data Non-searchable data that you want to associate with the user profile. diff --git a/typedapi/security/updateuserprofiledata/response.go b/typedapi/security/updateuserprofiledata/response.go old mode 100755 new mode 100644 index cec7b2e2a2..f41909b053 --- a/typedapi/security/updateuserprofiledata/response.go +++ b/typedapi/security/updateuserprofiledata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updateuserprofiledata // Response holds the response body struct for the package updateuserprofiledata // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/update_user_profile_data/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/update_user_profile_data/Response.ts#L22-L24 type Response struct { diff --git a/typedapi/security/updateuserprofiledata/update_user_profile_data.go b/typedapi/security/updateuserprofiledata/update_user_profile_data.go old mode 100755 new mode 100644 index 51f2c23162..8223c17923 --- a/typedapi/security/updateuserprofiledata/update_user_profile_data.go +++ b/typedapi/security/updateuserprofiledata/update_user_profile_data.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Update application specific data for the user profile of the given unique ID. package updateuserprofiledata @@ -209,7 +209,6 @@ func (r UpdateUserProfileData) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/shutdown/deletenode/delete_node.go b/typedapi/shutdown/deletenode/delete_node.go old mode 100755 new mode 100644 index f3e5845de5..f73e2bc886 --- a/typedapi/shutdown/deletenode/delete_node.go +++ b/typedapi/shutdown/deletenode/delete_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes a node from the shutdown list. Designed for indirect use by ECE/ESS // and ECK. Direct use is not supported. 
@@ -180,7 +180,6 @@ func (r DeleteNode) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/shutdown/deletenode/response.go b/typedapi/shutdown/deletenode/response.go old mode 100755 new mode 100644 index 7b8689825a..f2e68d416e --- a/typedapi/shutdown/deletenode/response.go +++ b/typedapi/shutdown/deletenode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletenode // Response holds the response body struct for the package deletenode // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/delete_node/ShutdownDeleteNodeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/delete_node/ShutdownDeleteNodeResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/shutdown/getnode/get_node.go b/typedapi/shutdown/getnode/get_node.go old mode 100755 new mode 100644 index 099868b88a..65b54264b8 --- a/typedapi/shutdown/getnode/get_node.go +++ b/typedapi/shutdown/getnode/get_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieve status of a node or nodes that are currently marked as shutting // down. Designed for indirect use by ECE/ESS and ECK. Direct use is not @@ -187,7 +187,6 @@ func (r GetNode) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/shutdown/getnode/response.go b/typedapi/shutdown/getnode/response.go old mode 100755 new mode 100644 index ca5e022be2..35e84e395c --- a/typedapi/shutdown/getnode/response.go +++ b/typedapi/shutdown/getnode/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getnode @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getnode // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L23-L27 type Response struct { Nodes []types.NodeShutdownStatus `json:"nodes"` diff --git a/typedapi/shutdown/putnode/put_node.go b/typedapi/shutdown/putnode/put_node.go old mode 100755 new mode 100644 index c5fb785085..d62d5585d9 --- a/typedapi/shutdown/putnode/put_node.go +++ b/typedapi/shutdown/putnode/put_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
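For the shutdown get_node Response above, an empty nodes list simply means nothing is currently marked for shutdown; a short decode sketch (illustrative body):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/shutdown/getnode"
)

func main() {
	// Illustrative body; real responses carry types.NodeShutdownStatus entries.
	body := []byte(`{"nodes":[]}`)

	var res getnode.Response
	if err := json.Unmarshal(body, &res); err != nil {
		panic(err)
	}
	fmt.Println("nodes shutting down:", len(res.Nodes))
}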
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. // Direct use is not supported. @@ -209,7 +209,6 @@ func (r PutNode) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/shutdown/putnode/request.go b/typedapi/shutdown/putnode/request.go old mode 100755 new mode 100644 index a195e6c996..0fa1c31962 --- a/typedapi/shutdown/putnode/request.go +++ b/typedapi/shutdown/putnode/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putnode @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putnode // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/put_node/ShutdownPutNodeRequest.ts#L25-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/put_node/ShutdownPutNodeRequest.ts#L25-L77 type Request struct { // AllocationDelay Only valid if type is restart. diff --git a/typedapi/shutdown/putnode/response.go b/typedapi/shutdown/putnode/response.go old mode 100755 new mode 100644 index cbb9df8b79..720369c84a --- a/typedapi/shutdown/putnode/response.go +++ b/typedapi/shutdown/putnode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putnode // Response holds the response body struct for the package putnode // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/put_node/ShutdownPutNodeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/put_node/ShutdownPutNodeResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/slm/deletelifecycle/delete_lifecycle.go b/typedapi/slm/deletelifecycle/delete_lifecycle.go old mode 100755 new mode 100644 index 74cd46b068..cf93c76da1 --- a/typedapi/slm/deletelifecycle/delete_lifecycle.go +++ b/typedapi/slm/deletelifecycle/delete_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing snapshot lifecycle policy. 
package deletelifecycle @@ -170,7 +170,6 @@ func (r DeleteLifecycle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/deletelifecycle/response.go b/typedapi/slm/deletelifecycle/response.go old mode 100755 new mode 100644 index c4521e0d63..ebd21b6a25 --- a/typedapi/slm/deletelifecycle/response.go +++ b/typedapi/slm/deletelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletelifecycle // Response holds the response body struct for the package deletelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/delete_lifecycle/DeleteSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/delete_lifecycle/DeleteSnapshotLifecycleResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/slm/executelifecycle/execute_lifecycle.go b/typedapi/slm/executelifecycle/execute_lifecycle.go old mode 100755 new mode 100644 index a3c976d11a..3f8a400e4a --- a/typedapi/slm/executelifecycle/execute_lifecycle.go +++ b/typedapi/slm/executelifecycle/execute_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Immediately creates a snapshot according to the lifecycle policy, without // waiting for the scheduled time. @@ -174,7 +174,6 @@ func (r ExecuteLifecycle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/executelifecycle/response.go b/typedapi/slm/executelifecycle/response.go old mode 100755 new mode 100644 index b8f783cc7a..bcdc4b27bf --- a/typedapi/slm/executelifecycle/response.go +++ b/typedapi/slm/executelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package executelifecycle // Response holds the response body struct for the package executelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/execute_lifecycle/ExecuteSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/execute_lifecycle/ExecuteSnapshotLifecycleResponse.ts#L22-L24 type Response struct { SnapshotName string `json:"snapshot_name"` diff --git a/typedapi/slm/executeretention/execute_retention.go b/typedapi/slm/executeretention/execute_retention.go old mode 100755 new mode 100644 index 1b189fb699..7af9948fa8 --- a/typedapi/slm/executeretention/execute_retention.go +++ b/typedapi/slm/executeretention/execute_retention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes any snapshots that are expired according to the policy's retention // rules. @@ -161,7 +161,6 @@ func (r ExecuteRetention) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/executeretention/response.go b/typedapi/slm/executeretention/response.go old mode 100755 new mode 100644 index ea00e55020..b4929e4b58 --- a/typedapi/slm/executeretention/response.go +++ b/typedapi/slm/executeretention/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package executeretention // Response holds the response body struct for the package executeretention // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/execute_retention/ExecuteRetentionResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/execute_retention/ExecuteRetentionResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/slm/getlifecycle/get_lifecycle.go b/typedapi/slm/getlifecycle/get_lifecycle.go old mode 100755 new mode 100644 index 314e621251..93ab9ff1f7 --- a/typedapi/slm/getlifecycle/get_lifecycle.go +++ b/typedapi/slm/getlifecycle/get_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves one or more snapshot lifecycle policy definitions and information // about the latest snapshot attempts. 
@@ -177,7 +177,6 @@ func (r GetLifecycle) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/getlifecycle/response.go b/typedapi/slm/getlifecycle/response.go old mode 100755 new mode 100644 index b9510a6081..8efa5ad38e --- a/typedapi/slm/getlifecycle/response.go +++ b/typedapi/slm/getlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getlifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/get_lifecycle/GetSnapshotLifecycleResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/get_lifecycle/GetSnapshotLifecycleResponse.ts#L24-L26 type Response map[string]types.SnapshotLifecycle diff --git a/typedapi/slm/getstats/get_stats.go b/typedapi/slm/getstats/get_stats.go old mode 100755 new mode 100644 index 6d8e57dc54..7579ebcca0 --- a/typedapi/slm/getstats/get_stats.go +++ b/typedapi/slm/getstats/get_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns global and policy-level statistics about actions taken by snapshot // lifecycle management. @@ -161,7 +161,6 @@ func (r GetStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/getstats/response.go b/typedapi/slm/getstats/response.go old mode 100755 new mode 100644 index 61e0c63f72..851d3f9b30 --- a/typedapi/slm/getstats/response.go +++ b/typedapi/slm/getstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/get_stats/GetSnapshotLifecycleStatsResponse.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/get_stats/GetSnapshotLifecycleStatsResponse.ts#L23-L36 type Response struct { PolicyStats []string `json:"policy_stats"` diff --git a/typedapi/slm/getstatus/get_status.go b/typedapi/slm/getstatus/get_status.go old mode 100755 new mode 100644 index 1e5be3d61c..0b2defa94e --- a/typedapi/slm/getstatus/get_status.go +++ b/typedapi/slm/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
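Because the get_lifecycle Response above is a map keyed by policy name (and its Do method therefore returns the map by value rather than a pointer), iterating it is the natural way to list policies; a minimal decode-and-iterate sketch (the policy body is left empty because types.SnapshotLifecycle is not part of this hunk):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/slm/getlifecycle"
)

func main() {
	// Illustrative body: one policy keyed by its name.
	body := []byte(`{"daily-snapshots":{}}`)

	var res getlifecycle.Response
	if err := json.Unmarshal(body, &res); err != nil {
		panic(err)
	}
	for name := range res {
		fmt.Println("policy:", name)
	}
}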
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves the status of snapshot lifecycle management (SLM). package getstatus @@ -159,7 +159,6 @@ func (r GetStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/getstatus/response.go b/typedapi/slm/getstatus/response.go old mode 100755 new mode 100644 index b2d3607e62..7adef72586 --- a/typedapi/slm/getstatus/response.go +++ b/typedapi/slm/getstatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getstatus @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/get_status/GetSnapshotLifecycleManagementStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/get_status/GetSnapshotLifecycleManagementStatusResponse.ts#L22-L24 type Response struct { OperationMode lifecycleoperationmode.LifecycleOperationMode `json:"operation_mode"` diff --git a/typedapi/slm/putlifecycle/put_lifecycle.go b/typedapi/slm/putlifecycle/put_lifecycle.go old mode 100755 new mode 100644 index 4c1984cf1e..7ca4347cc9 --- a/typedapi/slm/putlifecycle/put_lifecycle.go +++ b/typedapi/slm/putlifecycle/put_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates or updates a snapshot lifecycle policy. package putlifecycle @@ -205,7 +205,6 @@ func (r PutLifecycle) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/putlifecycle/request.go b/typedapi/slm/putlifecycle/request.go old mode 100755 new mode 100644 index 6d7c66e90a..25259dff30 --- a/typedapi/slm/putlifecycle/request.go +++ b/typedapi/slm/putlifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putlifecycle @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/put_lifecycle/PutSnapshotLifecycleRequest.ts#L26-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/put_lifecycle/PutSnapshotLifecycleRequest.ts#L26-L72 type Request struct { // Config Configuration for each snapshot created by the policy. 
diff --git a/typedapi/slm/putlifecycle/response.go b/typedapi/slm/putlifecycle/response.go old mode 100755 new mode 100644 index 5222bacb85..05e4e82fd0 --- a/typedapi/slm/putlifecycle/response.go +++ b/typedapi/slm/putlifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package putlifecycle // Response holds the response body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/put_lifecycle/PutSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/put_lifecycle/PutSnapshotLifecycleResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/slm/start/response.go b/typedapi/slm/start/response.go old mode 100755 new mode 100644 index b1925d8a8e..2c89b72e38 --- a/typedapi/slm/start/response.go +++ b/typedapi/slm/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/start/StartSnapshotLifecycleManagementResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/start/StartSnapshotLifecycleManagementResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/slm/start/start.go b/typedapi/slm/start/start.go old mode 100755 new mode 100644 index 00c54655fe..eff448ee5e --- a/typedapi/slm/start/start.go +++ b/typedapi/slm/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Turns on snapshot lifecycle management (SLM). package start @@ -159,7 +159,6 @@ func (r Start) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/slm/stop/response.go b/typedapi/slm/stop/response.go old mode 100755 new mode 100644 index ca665be5f9..0b0a3e3311 --- a/typedapi/slm/stop/response.go +++ b/typedapi/slm/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/stop/StopSnapshotLifecycleManagementResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/stop/StopSnapshotLifecycleManagementResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/slm/stop/stop.go b/typedapi/slm/stop/stop.go old mode 100755 new mode 100644 index e6dfb1a796..7dfe8cb07c --- a/typedapi/slm/stop/stop.go +++ b/typedapi/slm/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Turns off snapshot lifecycle management (SLM). package stop @@ -159,7 +159,6 @@ func (r Stop) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/cleanuprepository/cleanup_repository.go b/typedapi/snapshot/cleanuprepository/cleanup_repository.go old mode 100755 new mode 100644 index 3f1962341f..95eb302ef4 --- a/typedapi/snapshot/cleanuprepository/cleanup_repository.go +++ b/typedapi/snapshot/cleanuprepository/cleanup_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes stale data from repository. package cleanuprepository @@ -170,7 +170,6 @@ func (r CleanupRepository) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/cleanuprepository/response.go b/typedapi/snapshot/cleanuprepository/response.go old mode 100755 new mode 100644 index 41847c9694..30a4a685b3 --- a/typedapi/snapshot/cleanuprepository/response.go +++ b/typedapi/snapshot/cleanuprepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package cleanuprepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package cleanuprepository // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L22-L27 type Response struct { diff --git a/typedapi/snapshot/clone/clone.go b/typedapi/snapshot/clone/clone.go old mode 100755 new mode 100644 index 56c83bc94b..0299efd47d --- a/typedapi/snapshot/clone/clone.go +++ b/typedapi/snapshot/clone/clone.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clones indices from one snapshot into another snapshot in the same // repository. @@ -223,7 +223,6 @@ func (r Clone) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/clone/request.go b/typedapi/snapshot/clone/request.go old mode 100755 new mode 100644 index 17faf1db29..b8d7c2c8bf --- a/typedapi/snapshot/clone/request.go +++ b/typedapi/snapshot/clone/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clone @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/clone/SnapshotCloneRequest.ts#L24-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/clone/SnapshotCloneRequest.ts#L24-L42 type Request struct { Indices string `json:"indices"` } diff --git a/typedapi/snapshot/clone/response.go b/typedapi/snapshot/clone/response.go old mode 100755 new mode 100644 index 83f0b6b03c..3bee937493 --- a/typedapi/snapshot/clone/response.go +++ b/typedapi/snapshot/clone/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
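The clone Request above is shown in full: a single comma-separated Indices string. A minimal sketch of serializing it (the index patterns are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/snapshot/clone"
)

func main() {
	req := clone.Request{Indices: "logs-2023-*,metrics-2023-*"}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // e.g. {"indices":"logs-2023-*,metrics-2023-*"}
}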
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clone // Response holds the response body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/clone/SnapshotCloneResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/clone/SnapshotCloneResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/snapshot/create/create.go b/typedapi/snapshot/create/create.go old mode 100755 new mode 100644 index 6a86f4c598..824d84b076 --- a/typedapi/snapshot/create/create.go +++ b/typedapi/snapshot/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a snapshot in a repository. package create @@ -212,7 +212,6 @@ func (r Create) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/create/request.go b/typedapi/snapshot/create/request.go old mode 100755 new mode 100644 index aa892390eb..c0187c8632 --- a/typedapi/snapshot/create/request.go +++ b/typedapi/snapshot/create/request.go @@ -16,18 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package create import ( "encoding/json" "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/create/SnapshotCreateRequest.ts#L24-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/create/SnapshotCreateRequest.ts#L24-L81 type Request struct { // FeatureStates Feature states to include in the snapshot. Each feature state includes one or @@ -51,7 +53,7 @@ type Request struct { Indices []string `json:"indices,omitempty"` // Metadata Optional metadata for the snapshot. May have any contents. Must be less than // 1024 bytes. This map is not automatically generated by Elasticsearch. - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata types.Metadata `json:"metadata,omitempty"` // Partial If `true`, allows restoring a partial snapshot of indices with unavailable // shards. Only shards that were successfully included in the snapshot will be // restored. All missing shards will be recreated as empty. If `false`, the diff --git a/typedapi/snapshot/create/response.go b/typedapi/snapshot/create/response.go old mode 100755 new mode 100644 index d3fd123c5c..845405c12a --- a/typedapi/snapshot/create/response.go +++ b/typedapi/snapshot/create/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
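The snapshot create Request above also switches Metadata to types.Metadata, which is why the hunk adds the typedapi/types import. A small sketch of building a request body, again assuming types.Metadata is a string-keyed map of raw JSON values and keeping the payload well under the documented 1024-byte limit (field values are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/snapshot/create"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	req := create.Request{
		Indices: []string{"logs-*"},
		// Arbitrary, small metadata attached to the snapshot.
		Metadata: types.Metadata{
			"taken_by": json.RawMessage(`"nightly-cron"`),
		},
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}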
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package create @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/create/SnapshotCreateResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/create/SnapshotCreateResponse.ts#L22-L34 type Response struct { diff --git a/typedapi/snapshot/createrepository/create_repository.go b/typedapi/snapshot/createrepository/create_repository.go old mode 100755 new mode 100644 index 2ba73574d6..876aff33ca --- a/typedapi/snapshot/createrepository/create_repository.go +++ b/typedapi/snapshot/createrepository/create_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a repository. package createrepository @@ -204,7 +204,6 @@ func (r CreateRepository) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/createrepository/request.go b/typedapi/snapshot/createrepository/request.go old mode 100755 new mode 100644 index 620971342f..a85e3c92ab --- a/typedapi/snapshot/createrepository/request.go +++ b/typedapi/snapshot/createrepository/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package createrepository @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package createrepository // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts#L28-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts#L28-L49 type Request struct { Repository *types.Repository `json:"repository,omitempty"` Settings types.RepositorySettings `json:"settings"` diff --git a/typedapi/snapshot/createrepository/response.go b/typedapi/snapshot/createrepository/response.go old mode 100755 new mode 100644 index 3eb1c71eb9..328f53d5ad --- a/typedapi/snapshot/createrepository/response.go +++ b/typedapi/snapshot/createrepository/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package createrepository // Response holds the response body struct for the package createrepository // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/create_repository/SnapshotCreateRepositoryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/create_repository/SnapshotCreateRepositoryResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/snapshot/delete/delete.go b/typedapi/snapshot/delete/delete.go old mode 100755 new mode 100644 index 409a86ce05..2e08a5a769 --- a/typedapi/snapshot/delete/delete.go +++ b/typedapi/snapshot/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes one or more snapshots. package delete @@ -176,7 +176,6 @@ func (r Delete) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/delete/response.go b/typedapi/snapshot/delete/response.go old mode 100755 new mode 100644 index d7dcd92298..975ffdf09f --- a/typedapi/snapshot/delete/response.go +++ b/typedapi/snapshot/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/delete/SnapshotDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/delete/SnapshotDeleteResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/snapshot/deleterepository/delete_repository.go b/typedapi/snapshot/deleterepository/delete_repository.go old mode 100755 new mode 100644 index d9333d9f44..84f828cfcf --- a/typedapi/snapshot/deleterepository/delete_repository.go +++ b/typedapi/snapshot/deleterepository/delete_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes a repository. 
package deleterepository @@ -168,7 +168,6 @@ func (r DeleteRepository) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/deleterepository/response.go b/typedapi/snapshot/deleterepository/response.go old mode 100755 new mode 100644 index 457ace5559..cee7931e57 --- a/typedapi/snapshot/deleterepository/response.go +++ b/typedapi/snapshot/deleterepository/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleterepository // Response holds the response body struct for the package deleterepository // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/delete_repository/SnapshotDeleteRepositoryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/delete_repository/SnapshotDeleteRepositoryResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/snapshot/get/get.go b/typedapi/snapshot/get/get.go old mode 100755 new mode 100644 index 056a2780a1..56d8eee220 --- a/typedapi/snapshot/get/get.go +++ b/typedapi/snapshot/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about a snapshot. package get @@ -180,7 +180,6 @@ func (r Get) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/get/response.go b/typedapi/snapshot/get/response.go old mode 100755 new mode 100644 index d96f492d5e..89d882bb2a --- a/typedapi/snapshot/get/response.go +++ b/typedapi/snapshot/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/get/SnapshotGetResponse.ts#L25-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/get/SnapshotGetResponse.ts#L25-L40 type Response struct { diff --git a/typedapi/snapshot/getrepository/get_repository.go b/typedapi/snapshot/getrepository/get_repository.go old mode 100755 new mode 100644 index 4413bc41cd..10dd78a766 --- a/typedapi/snapshot/getrepository/get_repository.go +++ b/typedapi/snapshot/getrepository/get_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about a repository. package getrepository @@ -172,7 +172,6 @@ func (r GetRepository) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/getrepository/response.go b/typedapi/snapshot/getrepository/response.go old mode 100755 new mode 100644 index 498e8daa77..bd9fc125b3 --- a/typedapi/snapshot/getrepository/response.go +++ b/typedapi/snapshot/getrepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getrepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrepository // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/get_repository/SnapshotGetRepositoryResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/get_repository/SnapshotGetRepositoryResponse.ts#L23-L25 type Response map[string]types.Repository diff --git a/typedapi/snapshot/restore/request.go b/typedapi/snapshot/restore/request.go old mode 100755 new mode 100644 index 1868b163a3..6450382401 --- a/typedapi/snapshot/restore/request.go +++ b/typedapi/snapshot/restore/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package restore @@ -29,8 +29,9 @@ import ( // Request holds the request body struct for the package restore // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/restore/SnapshotRestoreRequest.ts#L25-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/restore/SnapshotRestoreRequest.ts#L25-L51 type Request struct { + FeatureStates []string `json:"feature_states,omitempty"` IgnoreIndexSettings []string `json:"ignore_index_settings,omitempty"` IgnoreUnavailable *bool `json:"ignore_unavailable,omitempty"` IncludeAliases *bool `json:"include_aliases,omitempty"` diff --git a/typedapi/snapshot/restore/response.go b/typedapi/snapshot/restore/response.go old mode 100755 new mode 100644 index 2434389720..0760da9ab0 --- a/typedapi/snapshot/restore/response.go +++ b/typedapi/snapshot/restore/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
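The restore Request above gains a FeatureStates field next to the existing pointer-typed flags; a minimal sketch of populating it (the feature-state name and the helper variable are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/snapshot/restore"
)

func main() {
	includeAliases := true

	req := restore.Request{
		// New in this spec bump: restore selected feature states alongside indices.
		FeatureStates:  []string{"geoip"},
		IncludeAliases: &includeAliases,
	}

	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}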
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package restore @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package restore // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/restore/SnapshotRestoreResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/restore/SnapshotRestoreResponse.ts#L23-L25 type Response struct { Snapshot types.SnapshotRestore `json:"snapshot"` diff --git a/typedapi/snapshot/restore/restore.go b/typedapi/snapshot/restore/restore.go old mode 100755 new mode 100644 index 05877297ed..f04c55e742 --- a/typedapi/snapshot/restore/restore.go +++ b/typedapi/snapshot/restore/restore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Restores a snapshot. package restore @@ -214,7 +214,6 @@ func (r Restore) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/status/response.go b/typedapi/snapshot/status/response.go old mode 100755 new mode 100644 index 0da645a2dc..f3cb2d1c63 --- a/typedapi/snapshot/status/response.go +++ b/typedapi/snapshot/status/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package status @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package status // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/status/SnapshotStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/status/SnapshotStatusResponse.ts#L22-L24 type Response struct { Snapshots []types.Status `json:"snapshots"` diff --git a/typedapi/snapshot/status/status.go b/typedapi/snapshot/status/status.go old mode 100755 new mode 100644 index 1a2a5cd054..1b84d11a4b --- a/typedapi/snapshot/status/status.go +++ b/typedapi/snapshot/status/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about the status of a snapshot. 
package status @@ -192,7 +192,6 @@ func (r Status) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/snapshot/verifyrepository/response.go b/typedapi/snapshot/verifyrepository/response.go old mode 100755 new mode 100644 index bbbc035e95..3288888a37 --- a/typedapi/snapshot/verifyrepository/response.go +++ b/typedapi/snapshot/verifyrepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package verifyrepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package verifyrepository // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L23-L25 type Response struct { Nodes map[string]types.CompactNodeInfo `json:"nodes"` diff --git a/typedapi/snapshot/verifyrepository/verify_repository.go b/typedapi/snapshot/verifyrepository/verify_repository.go old mode 100755 new mode 100644 index 0db3aeb471..7d11245746 --- a/typedapi/snapshot/verifyrepository/verify_repository.go +++ b/typedapi/snapshot/verifyrepository/verify_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Verifies a repository. package verifyrepository @@ -170,7 +170,6 @@ func (r VerifyRepository) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/some/pointers.go b/typedapi/some/pointers.go index e5c5e9fe53..d644730638 100644 --- a/typedapi/some/pointers.go +++ b/typedapi/some/pointers.go @@ -19,6 +19,8 @@ // on primitive types for the TypedAPI. package some +import "github.com/elastic/go-elasticsearch/v8/typedapi/types" + // String returns a pointer to a string func String(value string) *string { return &value @@ -95,8 +97,9 @@ func Float32(value float32) *float32 { } // Float64 returns a pointer to a float64 -func Float64(value float64) *float64 { - return &value +func Float64(value float64) *types.Float64 { + f := types.Float64(value) + return &f } // Complex64 returns a pointer to a complex64 diff --git a/typedapi/sql/clearcursor/clear_cursor.go b/typedapi/sql/clearcursor/clear_cursor.go old mode 100755 new mode 100644 index e19f36bae3..bf0fe97a4d --- a/typedapi/sql/clearcursor/clear_cursor.go +++ b/typedapi/sql/clearcursor/clear_cursor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
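Beyond the spec-hash bumps, the some package change above alters the return type of Float64; a short sketch of the new call-site shape (variable names are illustrative):

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/some"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// some.Float64 now returns *types.Float64, so the pointer can be assigned
	// directly to generated request fields that use the types.Float64 alias.
	var boost *types.Float64 = some.Float64(1.5)

	// The other helpers keep returning pointers to the plain primitive types.
	name := some.String("my-index")

	fmt.Println(*boost, *name)
}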
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Clears the SQL cursor package clearcursor @@ -194,7 +194,6 @@ func (r ClearCursor) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/sql/clearcursor/request.go b/typedapi/sql/clearcursor/request.go old mode 100755 new mode 100644 index 1f2ea37cd8..ee9d2a4366 --- a/typedapi/sql/clearcursor/request.go +++ b/typedapi/sql/clearcursor/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcursor @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clearcursor // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/clear_cursor/ClearSqlCursorRequest.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/clear_cursor/ClearSqlCursorRequest.ts#L22-L31 type Request struct { Cursor string `json:"cursor"` } diff --git a/typedapi/sql/clearcursor/response.go b/typedapi/sql/clearcursor/response.go old mode 100755 new mode 100644 index 0e7318f3ca..daa48b5619 --- a/typedapi/sql/clearcursor/response.go +++ b/typedapi/sql/clearcursor/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package clearcursor // Response holds the response body struct for the package clearcursor // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/clear_cursor/ClearSqlCursorResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/clear_cursor/ClearSqlCursorResponse.ts#L20-L22 type Response struct { Succeeded bool `json:"succeeded"` diff --git a/typedapi/sql/deleteasync/delete_async.go b/typedapi/sql/deleteasync/delete_async.go old mode 100755 new mode 100644 index ce2d1d8fd1..a5f5da62b8 --- a/typedapi/sql/deleteasync/delete_async.go +++ b/typedapi/sql/deleteasync/delete_async.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an async SQL search or a stored synchronous SQL search. If the search // is still running, the API cancels it. 
@@ -174,7 +174,6 @@ func (r DeleteAsync) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/sql/deleteasync/response.go b/typedapi/sql/deleteasync/response.go old mode 100755 new mode 100644 index b4ff3bd494..ecd3aee961 --- a/typedapi/sql/deleteasync/response.go +++ b/typedapi/sql/deleteasync/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deleteasync // Response holds the response body struct for the package deleteasync // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/delete_async/SqlDeleteAsyncResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/delete_async/SqlDeleteAsyncResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/sql/getasync/get_async.go b/typedapi/sql/getasync/get_async.go old mode 100755 new mode 100644 index adeb573d1a..8a5965a8fd --- a/typedapi/sql/getasync/get_async.go +++ b/typedapi/sql/getasync/get_async.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the current status and available results for an async SQL search or // stored synchronous SQL search @@ -172,7 +172,6 @@ func (r GetAsync) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/sql/getasync/response.go b/typedapi/sql/getasync/response.go old mode 100755 new mode 100644 index 68f2c30d92..e5eff09ee2 --- a/typedapi/sql/getasync/response.go +++ b/typedapi/sql/getasync/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getasync @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package getasync // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/get_async/SqlGetAsyncResponse.ts#L23-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/get_async/SqlGetAsyncResponse.ts#L23-L60 type Response struct { diff --git a/typedapi/sql/getasyncstatus/get_async_status.go b/typedapi/sql/getasyncstatus/get_async_status.go old mode 100755 new mode 100644 index c407a4a3ef..6b5b190ba8 --- a/typedapi/sql/getasyncstatus/get_async_status.go +++ b/typedapi/sql/getasyncstatus/get_async_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns the current status of an async SQL search or a stored synchronous SQL // search @@ -174,7 +174,6 @@ func (r GetAsyncStatus) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/sql/getasyncstatus/response.go b/typedapi/sql/getasyncstatus/response.go old mode 100755 new mode 100644 index 25298cd5a8..88ab79e281 --- a/typedapi/sql/getasyncstatus/response.go +++ b/typedapi/sql/getasyncstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getasyncstatus // Response holds the response body struct for the package getasyncstatus // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/get_async_status/SqlGetAsyncStatusResponse.ts#L23-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/get_async_status/SqlGetAsyncStatusResponse.ts#L23-L55 type Response struct { diff --git a/typedapi/sql/query/query.go b/typedapi/sql/query/query.go old mode 100755 new mode 100644 index 5051656732..092a265715 --- a/typedapi/sql/query/query.go +++ b/typedapi/sql/query/query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Executes a SQL request package query @@ -192,7 +192,6 @@ func (r Query) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/sql/query/request.go b/typedapi/sql/query/request.go old mode 100755 new mode 100644 index caa89be46f..063cfda010 --- a/typedapi/sql/query/request.go +++ b/typedapi/sql/query/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package query @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/query/QuerySqlRequest.ts#L28-L115 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/query/QuerySqlRequest.ts#L28-L115 type Request struct { // Catalog Default catalog (cluster) for queries. If unspecified, the queries execute on @@ -65,7 +65,7 @@ type Request struct { RequestTimeout types.Duration `json:"request_timeout,omitempty"` // RuntimeMappings Defines one or more runtime fields in the search request. These fields take // precedence over mapped fields with the same name. 
- RuntimeMappings map[string]types.RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` // TimeZone Time-zone in ISO 8601 used for executing the query on the server. More // information available here. TimeZone *string `json:"time_zone,omitempty"` diff --git a/typedapi/sql/query/response.go b/typedapi/sql/query/response.go old mode 100755 new mode 100644 index f951d763be..c7fcd1c189 --- a/typedapi/sql/query/response.go +++ b/typedapi/sql/query/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package query @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/query/QuerySqlResponse.ts#L23-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/query/QuerySqlResponse.ts#L23-L60 type Response struct { diff --git a/typedapi/sql/translate/request.go b/typedapi/sql/translate/request.go old mode 100755 new mode 100644 index 6b2c956c69..302cd98adc --- a/typedapi/sql/translate/request.go +++ b/typedapi/sql/translate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package translate @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package translate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/translate/TranslateSqlRequest.ts#L25-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/translate/TranslateSqlRequest.ts#L25-L37 type Request struct { FetchSize *int `json:"fetch_size,omitempty"` Filter *types.Query `json:"filter,omitempty"` diff --git a/typedapi/sql/translate/response.go b/typedapi/sql/translate/response.go old mode 100755 new mode 100644 index 5426b6ba90..4947cc86cc --- a/typedapi/sql/translate/response.go +++ b/typedapi/sql/translate/response.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
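With the hunk above, RuntimeMappings on the SQL query request uses the types.RuntimeFields alias rather than a raw map. A short sketch of building such a request, assuming RuntimeFields is a map keyed by field name, that RuntimeField exposes a Type field, and that the runtimefieldtype enum package provides the usual constants; the field name is a placeholder:

package examples

import (
	"github.com/elastic/go-elasticsearch/v8/typedapi/sql/query"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/runtimefieldtype"
)

func buildQueryRequest() *query.Request {
	req := query.NewRequest()
	// RuntimeMappings now takes the shared alias instead of
	// map[string]types.RuntimeField; the literal shape stays the same.
	req.RuntimeMappings = types.RuntimeFields{
		"day_of_week": types.RuntimeField{Type: runtimefieldtype.Keyword},
	}
	return req
}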
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package translate import ( + "bytes" + "encoding/json" + "errors" + "io" + "strconv" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Response holds the response body struct for the package translate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/translate/TranslateSqlResponse.ts#L28-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/translate/TranslateSqlResponse.ts#L28-L38 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` @@ -44,3 +50,76 @@ func NewResponse() *Response { } return r } + +func (s *Response) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]types.Aggregations, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return err + } + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int64(v) + s.Size = &f + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(types.SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return err + } + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + } + } + return nil +} diff --git a/typedapi/sql/translate/translate.go b/typedapi/sql/translate/translate.go old mode 100755 new mode 100644 index bd96a15309..0fad41d700 --- a/typedapi/sql/translate/translate.go +++ b/typedapi/sql/translate/translate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Translates SQL into Elasticsearch queries package translate @@ -194,7 +194,6 @@ func (r Translate) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ssl/certificates/certificates.go b/typedapi/ssl/certificates/certificates.go old mode 100755 new mode 100644 index 36ce69bac7..7ccbdf3e37 --- a/typedapi/ssl/certificates/certificates.go +++ b/typedapi/ssl/certificates/certificates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
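The new UnmarshalJSON on the translate response above normalises fields the server may encode in more than one shape: size can arrive as a string or a number, and sort as a single object or an array. A small sketch exercising that behaviour against a synthetic body:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/sql/translate"
)

func main() {
	// "size" is a string and "sort" a single object; both forms are accepted
	// by the custom decoder shown above.
	body := []byte(`{"size":"10","sort":{"ts":{"order":"desc"}}}`)

	resp := translate.NewResponse()
	if err := json.Unmarshal(body, resp); err != nil {
		panic(err)
	}
	fmt.Println(*resp.Size, len(resp.Sort)) // 10 1
}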
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves information about the X.509 certificates used to encrypt // communications in the cluster. @@ -161,7 +161,6 @@ func (r Certificates) Do(ctx context.Context) (Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/ssl/certificates/response.go b/typedapi/ssl/certificates/response.go old mode 100755 new mode 100644 index aaa156a2b2..67b182ac10 --- a/typedapi/ssl/certificates/response.go +++ b/typedapi/ssl/certificates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package certificates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package certificates // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ssl/certificates/GetCertificatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ssl/certificates/GetCertificatesResponse.ts#L22-L24 type Response []types.CertificateInformation diff --git a/typedapi/tasks/cancel/cancel.go b/typedapi/tasks/cancel/cancel.go old mode 100755 new mode 100644 index 452e433d7e..49b7b21e12 --- a/typedapi/tasks/cancel/cancel.go +++ b/typedapi/tasks/cancel/cancel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Cancels a task, if it can be cancelled through an API. package cancel @@ -176,7 +176,6 @@ func (r Cancel) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/tasks/cancel/response.go b/typedapi/tasks/cancel/response.go old mode 100755 new mode 100644 index 7146f090e3..86f384691f --- a/typedapi/tasks/cancel/response.go +++ b/typedapi/tasks/cancel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package cancel @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package cancel // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/cancel/CancelTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/cancel/CancelTasksResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` @@ -73,6 +73,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "nodes": + if s.Nodes == nil { + s.Nodes = make(map[string]types.NodeTasks, 0) + } if err := dec.Decode(&s.Nodes); err != nil { return err } @@ -83,8 +86,24 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "tasks": - if err := dec.Decode(&s.Tasks); err != nil { - return err + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]types.ParentTaskInfo, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Tasks = o + case '[': + o := []types.TaskInfo{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Tasks = o } } diff --git a/typedapi/tasks/get/get.go b/typedapi/tasks/get/get.go old mode 100755 new mode 100644 index 81f01e03a8..dec8bbb224 --- a/typedapi/tasks/get/get.go +++ b/typedapi/tasks/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns information about a task. package get @@ -169,7 +169,6 @@ func (r Get) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/tasks/get/response.go b/typedapi/tasks/get/response.go old mode 100755 new mode 100644 index e3706a8948..f67f0d0eed --- a/typedapi/tasks/get/response.go +++ b/typedapi/tasks/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/get/GetTaskResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/get/GetTaskResponse.ts#L24-L31 type Response struct { Completed bool `json:"completed"` diff --git a/typedapi/tasks/list/list.go b/typedapi/tasks/list/list.go old mode 100755 new mode 100644 index 76dfd6b53d..c46a89f173 --- a/typedapi/tasks/list/list.go +++ b/typedapi/tasks/list/list.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Returns a list of tasks. package list @@ -160,7 +160,6 @@ func (r List) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/tasks/list/response.go b/typedapi/tasks/list/response.go old mode 100755 new mode 100644 index 7853ea0dc2..1067ad11ac --- a/typedapi/tasks/list/response.go +++ b/typedapi/tasks/list/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package list @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package list // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/list/ListTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/list/ListTasksResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` @@ -73,6 +73,9 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "nodes": + if s.Nodes == nil { + s.Nodes = make(map[string]types.NodeTasks, 0) + } if err := dec.Decode(&s.Nodes); err != nil { return err } @@ -83,8 +86,24 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "tasks": - if err := dec.Decode(&s.Tasks); err != nil { - return err + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]types.ParentTaskInfo, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Tasks = o + case '[': + o := []types.TaskInfo{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Tasks = o } } diff --git a/typedapi/transform/deletetransform/delete_transform.go b/typedapi/transform/deletetransform/delete_transform.go old mode 100755 new mode 100644 index 5d1a4c9870..58f6e8c0c8 --- a/typedapi/transform/deletetransform/delete_transform.go +++ b/typedapi/transform/deletetransform/delete_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deletes an existing transform. package deletetransform @@ -169,7 +169,6 @@ func (r DeleteTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/deletetransform/response.go b/typedapi/transform/deletetransform/response.go old mode 100755 new mode 100644 index ae389dc2dd..7f2077542c --- a/typedapi/transform/deletetransform/response.go +++ b/typedapi/transform/deletetransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
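Both the tasks cancel and tasks list responses above now decode tasks into either a map of parent tasks (grouped output) or a flat slice, depending on the JSON shape the server returns. A consumption sketch, assuming a configured *elasticsearch.TypedClient and that the Tasks field is the interface union used by the generated types:

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/elasticsearch"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func listTasks(ctx context.Context, es *elasticsearch.TypedClient) error {
	resp, err := es.Tasks.List().Do(ctx)
	if err != nil {
		return err
	}
	// The new decoder keeps both shapes: grouped by parent task or flat.
	switch tasks := resp.Tasks.(type) {
	case map[string]types.ParentTaskInfo:
		fmt.Println("grouped tasks:", len(tasks))
	case []types.TaskInfo:
		fmt.Println("flat task list:", len(tasks))
	}
	return nil
}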
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletetransform // Response holds the response body struct for the package deletetransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/delete_transform/DeleteTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/delete_transform/DeleteTransformResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/transform/gettransform/get_transform.go b/typedapi/transform/gettransform/get_transform.go old mode 100755 new mode 100644 index 5233caba5e..00cbc036b4 --- a/typedapi/transform/gettransform/get_transform.go +++ b/typedapi/transform/gettransform/get_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves configuration information for transforms. package gettransform @@ -172,7 +172,6 @@ func (r GetTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/gettransform/response.go b/typedapi/transform/gettransform/response.go old mode 100755 new mode 100644 index e826601c78..e5c73bc85d --- a/typedapi/transform/gettransform/response.go +++ b/typedapi/transform/gettransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettransform @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform/GetTransformResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform/GetTransformResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/transform/gettransformstats/get_transform_stats.go b/typedapi/transform/gettransformstats/get_transform_stats.go old mode 100755 new mode 100644 index 62a304c53f..fb6998991e --- a/typedapi/transform/gettransformstats/get_transform_stats.go +++ b/typedapi/transform/gettransformstats/get_transform_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves usage information for transforms. 
package gettransformstats @@ -171,7 +171,6 @@ func (r GetTransformStats) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/gettransformstats/response.go b/typedapi/transform/gettransformstats/response.go old mode 100755 new mode 100644 index 350b321067..b3a4a54158 --- a/typedapi/transform/gettransformstats/response.go +++ b/typedapi/transform/gettransformstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package gettransformstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettransformstats // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/GetTransformStatsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/GetTransformStatsResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` diff --git a/typedapi/transform/previewtransform/preview_transform.go b/typedapi/transform/previewtransform/preview_transform.go old mode 100755 new mode 100644 index c0673d29ac..6d30fb5637 --- a/typedapi/transform/previewtransform/preview_transform.go +++ b/typedapi/transform/previewtransform/preview_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Previews a transform. package previewtransform @@ -210,7 +210,6 @@ func (r PreviewTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/previewtransform/request.go b/typedapi/transform/previewtransform/request.go old mode 100755 new mode 100644 index c6af6dce29..a73fff70da --- a/typedapi/transform/previewtransform/request.go +++ b/typedapi/transform/previewtransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package previewtransform @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewtransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/preview_transform/PreviewTransformRequest.ts#L33-L107 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/preview_transform/PreviewTransformRequest.ts#L33-L107 type Request struct { // Description Free text description of the transform. 
diff --git a/typedapi/transform/previewtransform/response.go b/typedapi/transform/previewtransform/response.go old mode 100755 new mode 100644 index 98dc0cfc9d..c15c4f4133 --- a/typedapi/transform/previewtransform/response.go +++ b/typedapi/transform/previewtransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package previewtransform @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package previewtransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/preview_transform/PreviewTransformResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/preview_transform/PreviewTransformResponse.ts#L22-L27 type Response struct { GeneratedDestIndex types.IndexState `json:"generated_dest_index"` diff --git a/typedapi/transform/puttransform/put_transform.go b/typedapi/transform/puttransform/put_transform.go old mode 100755 new mode 100644 index c2fc3545a4..3fa7dcf492 --- a/typedapi/transform/puttransform/put_transform.go +++ b/typedapi/transform/puttransform/put_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Instantiates a transform. package puttransform @@ -204,7 +204,6 @@ func (r PutTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/puttransform/request.go b/typedapi/transform/puttransform/request.go old mode 100755 new mode 100644 index b593b048ec..b1328b8a51 --- a/typedapi/transform/puttransform/request.go +++ b/typedapi/transform/puttransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttransform @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package puttransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/put_transform/PutTransformRequest.ts#L33-L122 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/put_transform/PutTransformRequest.ts#L33-L122 type Request struct { // Description Free text description of the transform. @@ -46,7 +46,7 @@ type Request struct { // unique key. Latest *types.Latest `json:"latest,omitempty"` // Meta_ Defines optional transform metadata. - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` // Pivot The pivot method transforms the data by aggregating and grouping it. These // objects define the group by fields // and the aggregation to reduce the data. 
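In the put transform request above, _meta moves from a raw map[string]json.RawMessage to the shared types.Metadata alias (the same change appears further down for the update transform request and response). A minimal sketch of setting it, assuming types.Metadata is still a map of raw JSON values; the key and value are made up:

package examples

import (
	"encoding/json"

	"github.com/elastic/go-elasticsearch/v8/typedapi/transform/puttransform"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func buildPutTransformRequest() *puttransform.Request {
	req := puttransform.NewRequest()
	// _meta keeps arbitrary JSON values; only the declared Go type changed.
	req.Meta_ = types.Metadata{
		"owner": json.RawMessage(`"data-platform-team"`),
	}
	return req
}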
diff --git a/typedapi/transform/puttransform/response.go b/typedapi/transform/puttransform/response.go old mode 100755 new mode 100644 index 7c3eee08b2..1435c43933 --- a/typedapi/transform/puttransform/response.go +++ b/typedapi/transform/puttransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package puttransform // Response holds the response body struct for the package puttransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/put_transform/PutTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/put_transform/PutTransformResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/transform/resettransform/reset_transform.go b/typedapi/transform/resettransform/reset_transform.go old mode 100755 new mode 100644 index 6d85f4a19f..cf1e48af0b --- a/typedapi/transform/resettransform/reset_transform.go +++ b/typedapi/transform/resettransform/reset_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Resets an existing transform. package resettransform @@ -171,7 +171,6 @@ func (r ResetTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/resettransform/response.go b/typedapi/transform/resettransform/response.go old mode 100755 new mode 100644 index 4f458dabf1..f339b9e4b2 --- a/typedapi/transform/resettransform/response.go +++ b/typedapi/transform/resettransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package resettransform // Response holds the response body struct for the package resettransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/reset_transform/ResetTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/reset_transform/ResetTransformResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/transform/schedulenowtransform/response.go b/typedapi/transform/schedulenowtransform/response.go new file mode 100644 index 0000000000..ff19530480 --- /dev/null +++ b/typedapi/transform/schedulenowtransform/response.go @@ -0,0 +1,38 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package schedulenowtransform + +// Response holds the response body struct for the package schedulenowtransform +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/schedule_now_transform/ScheduleNowTransformResponse.ts#L21-L23 + +type Response struct { + + // Acknowledged For a successful response, this value is always true. On failure, an + // exception is returned instead. + Acknowledged bool `json:"acknowledged"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/transform/schedulenowtransform/schedule_now_transform.go b/typedapi/transform/schedulenowtransform/schedule_now_transform.go new file mode 100644 index 0000000000..e36ebeadf0 --- /dev/null +++ b/typedapi/transform/schedulenowtransform/schedule_now_transform.go @@ -0,0 +1,233 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Schedules now a transform. +package schedulenowtransform + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + transformidMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type ScheduleNowTransform struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + buf *gobytes.Buffer + + paramSet int + + transformid string +} + +// NewScheduleNowTransform type alias for index. 
+type NewScheduleNowTransform func(transformid string) *ScheduleNowTransform + +// NewScheduleNowTransformFunc returns a new instance of ScheduleNowTransform with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewScheduleNowTransformFunc(tp elastictransport.Interface) NewScheduleNowTransform { + return func(transformid string) *ScheduleNowTransform { + n := New(tp) + + n.TransformId(transformid) + + return n + } +} + +// Schedules now a transform. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/schedule-now-transform.html +func New(tp elastictransport.Interface) *ScheduleNowTransform { + r := &ScheduleNowTransform{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + buf: gobytes.NewBuffer(nil), + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *ScheduleNowTransform) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == transformidMask: + path.WriteString("/") + path.WriteString("_transform") + path.WriteString("/") + + path.WriteString(r.transformid) + path.WriteString("/") + path.WriteString("_schedule_now") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.buf) + } else { + req, err = http.NewRequest(method, r.path.String(), r.buf) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.buf.Len() > 0 { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r ScheduleNowTransform) Perform(ctx context.Context) (*http.Response, error) { + req, err := r.HttpRequest(ctx) + if err != nil { + return nil, err + } + + res, err := r.transport.Perform(req) + if err != nil { + return nil, fmt.Errorf("an error happened during the ScheduleNowTransform query execution: %w", err) + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a schedulenowtransform.Response +func (r ScheduleNowTransform) Do(ctx context.Context) (*Response, error) { + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + return nil, err + } + + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. 
+func (r ScheduleNowTransform) IsSuccess(ctx context.Context) (bool, error) { + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(ioutil.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + return false, nil +} + +// Header set a key, value pair in the ScheduleNowTransform headers map. +func (r *ScheduleNowTransform) Header(key, value string) *ScheduleNowTransform { + r.headers.Set(key, value) + + return r +} + +// TransformId Identifier for the transform. +// API Name: transformid +func (r *ScheduleNowTransform) TransformId(v string) *ScheduleNowTransform { + r.paramSet |= transformidMask + r.transformid = v + + return r +} + +// Timeout Controls the time to wait for the scheduling to take place +// API name: timeout +func (r *ScheduleNowTransform) Timeout(v string) *ScheduleNowTransform { + r.values.Set("timeout", v) + + return r +} diff --git a/typedapi/transform/starttransform/response.go b/typedapi/transform/starttransform/response.go old mode 100755 new mode 100644 index 443ab666a5..d636e90204 --- a/typedapi/transform/starttransform/response.go +++ b/typedapi/transform/starttransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package starttransform // Response holds the response body struct for the package starttransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/start_transform/StartTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/start_transform/StartTransformResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/transform/starttransform/start_transform.go b/typedapi/transform/starttransform/start_transform.go old mode 100755 new mode 100644 index 7520280d35..3258511fc3 --- a/typedapi/transform/starttransform/start_transform.go +++ b/typedapi/transform/starttransform/start_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Starts one or more transforms. package starttransform @@ -170,7 +170,6 @@ func (r StartTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() @@ -227,3 +226,13 @@ func (r *StartTransform) Timeout(v string) *StartTransform { return r } + +// From Restricts the set of transformed entities to those changed after this time. +// Relative times like now-30d are supported. Only applicable for continuous +// transforms. 
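The new schedule-now endpoint above can be driven through the typed client like any other transform call, and the start endpoint gains a from query parameter in the same change. A usage sketch, assuming a configured *elasticsearch.TypedClient; "my-transform" is a placeholder id:

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/elasticsearch"
)

func scheduleNow(ctx context.Context, es *elasticsearch.TypedClient) error {
	// Trigger an immediate run of the transform, waiting up to 30s for the
	// scheduling to be accepted.
	resp, err := es.Transform.ScheduleNowTransform("my-transform").
		Timeout("30s").
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("acknowledged:", resp.Acknowledged)

	// The start endpoint now also accepts relative expressions such as now-30d
	// through the new From parameter (continuous transforms only).
	_, err = es.Transform.StartTransform("my-transform").From("now-30d").Do(ctx)
	return err
}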
+// API name: from +func (r *StartTransform) From(v string) *StartTransform { + r.values.Set("from", v) + + return r +} diff --git a/typedapi/transform/stoptransform/response.go b/typedapi/transform/stoptransform/response.go old mode 100755 new mode 100644 index c312faca4f..b086c20e88 --- a/typedapi/transform/stoptransform/response.go +++ b/typedapi/transform/stoptransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package stoptransform // Response holds the response body struct for the package stoptransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/stop_transform/StopTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/stop_transform/StopTransformResponse.ts#L22-L24 type Response struct { diff --git a/typedapi/transform/stoptransform/stop_transform.go b/typedapi/transform/stoptransform/stop_transform.go old mode 100755 new mode 100644 index 56e5d93fed..feb72bf065 --- a/typedapi/transform/stoptransform/stop_transform.go +++ b/typedapi/transform/stoptransform/stop_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Stops one or more transforms. package stoptransform @@ -171,7 +171,6 @@ func (r StopTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/updatetransform/request.go b/typedapi/transform/updatetransform/request.go old mode 100755 new mode 100644 index 8c74ed4033..8bb4e5fa38 --- a/typedapi/transform/updatetransform/request.go +++ b/typedapi/transform/updatetransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatetransform @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package updatetransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/update_transform/UpdateTransformRequest.ts#L31-L105 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/update_transform/UpdateTransformRequest.ts#L31-L105 type Request struct { // Description Free text description of the transform. @@ -42,7 +42,7 @@ type Request struct { // indexing. The minimum value is 1s and the maximum is 1h. Frequency types.Duration `json:"frequency,omitempty"` // Meta_ Defines optional transform metadata. - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` // RetentionPolicy Defines a retention policy for the transform. 
Data that meets the defined // criteria is deleted from the destination index. RetentionPolicy types.RetentionPolicyContainer `json:"retention_policy,omitempty"` diff --git a/typedapi/transform/updatetransform/response.go b/typedapi/transform/updatetransform/response.go old mode 100755 new mode 100644 index af5ea1264a..f5c3c0bd7d --- a/typedapi/transform/updatetransform/response.go +++ b/typedapi/transform/updatetransform/response.go @@ -16,19 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package updatetransform import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Response holds the response body struct for the package updatetransform // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/update_transform/UpdateTransformResponse.ts#L33-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/update_transform/UpdateTransformResponse.ts#L33-L51 type Response struct { Authorization *types.TransformAuthorization `json:"authorization,omitempty"` @@ -38,7 +36,7 @@ type Response struct { Frequency types.Duration `json:"frequency,omitempty"` Id string `json:"id"` Latest *types.Latest `json:"latest,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ types.Metadata `json:"_meta,omitempty"` Pivot *types.Pivot `json:"pivot,omitempty"` RetentionPolicy *types.RetentionPolicyContainer `json:"retention_policy,omitempty"` Settings types.Settings `json:"settings"` diff --git a/typedapi/transform/updatetransform/update_transform.go b/typedapi/transform/updatetransform/update_transform.go old mode 100755 new mode 100644 index e61d8fc321..f69483e173 --- a/typedapi/transform/updatetransform/update_transform.go +++ b/typedapi/transform/updatetransform/update_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Updates certain properties of a transform. package updatetransform @@ -206,7 +206,6 @@ func (r UpdateTransform) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/transform/upgradetransforms/response.go b/typedapi/transform/upgradetransforms/response.go old mode 100755 new mode 100644 index b8a1e50bbe..bff7bdae84 --- a/typedapi/transform/upgradetransforms/response.go +++ b/typedapi/transform/upgradetransforms/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package upgradetransforms // Response holds the response body struct for the package upgradetransforms // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/upgrade_transforms/UpgradeTransformsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/upgrade_transforms/UpgradeTransformsResponse.ts#L25-L34 type Response struct { diff --git a/typedapi/transform/upgradetransforms/upgrade_transforms.go b/typedapi/transform/upgradetransforms/upgrade_transforms.go old mode 100755 new mode 100644 index 14532d9dd8..757ef9ae74 --- a/typedapi/transform/upgradetransforms/upgrade_transforms.go +++ b/typedapi/transform/upgradetransforms/upgrade_transforms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Upgrades all transforms. package upgradetransforms @@ -166,7 +166,6 @@ func (r UpgradeTransforms) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/types/acknowledgement.go b/typedapi/types/acknowledgement.go old mode 100755 new mode 100644 index b45ca4637f..e8325c9ee6 --- a/typedapi/types/acknowledgement.go +++ b/typedapi/types/acknowledgement.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Acknowledgement type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/post/types.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/post/types.ts#L20-L23 type Acknowledgement struct { License []string `json:"license"` Message string `json:"message"` diff --git a/typedapi/types/acknowledgestate.go b/typedapi/types/acknowledgestate.go old mode 100755 new mode 100644 index f819cc08ca..20b5963465 --- a/typedapi/types/acknowledgestate.go +++ b/typedapi/types/acknowledgestate.go @@ -16,22 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/acknowledgementoptions" + + "bytes" + "errors" + "io" + + "encoding/json" ) // AcknowledgeState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L112-L115 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L112-L115 type AcknowledgeState struct { State acknowledgementoptions.AcknowledgementOptions `json:"state"` Timestamp DateTime `json:"timestamp"` } +func (s *AcknowledgeState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewAcknowledgeState returns a AcknowledgeState. func NewAcknowledgeState() *AcknowledgeState { r := &AcknowledgeState{} diff --git a/typedapi/types/actionstatus.go b/typedapi/types/actionstatus.go old mode 100755 new mode 100644 index 49b634ae00..15733b8527 --- a/typedapi/types/actionstatus.go +++ b/typedapi/types/actionstatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ActionStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L128-L133 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L128-L133 type ActionStatus struct { Ack AcknowledgeState `json:"ack"` LastExecution *ExecutionState `json:"last_execution,omitempty"` diff --git a/typedapi/types/activationstate.go b/typedapi/types/activationstate.go old mode 100755 new mode 100644 index 1b137732d3..09e7417281 --- a/typedapi/types/activationstate.go +++ b/typedapi/types/activationstate.go @@ -16,18 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ActivationState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Activation.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Activation.ts#L24-L27 type ActivationState struct { Active bool `json:"active"` Timestamp DateTime `json:"timestamp"` } +func (s *ActivationState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Active = value + case bool: + s.Active = v + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewActivationState returns a ActivationState. func NewActivationState() *ActivationState { r := &ActivationState{} diff --git a/typedapi/types/activationstatus.go b/typedapi/types/activationstatus.go old mode 100755 new mode 100644 index 21a32d2ad5..936fa8ea2d --- a/typedapi/types/activationstatus.go +++ b/typedapi/types/activationstatus.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ActivationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Activation.ts#L29-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Activation.ts#L29-L33 type ActivationStatus struct { Actions WatcherStatusActions `json:"actions"` State ActivationState `json:"state"` Version int64 `json:"version"` } +func (s *ActivationStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewActivationStatus returns a ActivationStatus. func NewActivationStatus() *ActivationStatus { r := &ActivationStatus{} diff --git a/typedapi/types/adaptiveselection.go b/typedapi/types/adaptiveselection.go old mode 100755 new mode 100644 index 086805d155..ac76c0e4f2 --- a/typedapi/types/adaptiveselection.go +++ b/typedapi/types/adaptiveselection.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AdaptiveSelection type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L169-L177 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L169-L177 type AdaptiveSelection struct { AvgQueueSize *int64 `json:"avg_queue_size,omitempty"` AvgResponseTime Duration `json:"avg_response_time,omitempty"` @@ -33,6 +43,104 @@ type AdaptiveSelection struct { Rank *string `json:"rank,omitempty"` } +func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg_queue_size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvgQueueSize = &value + case float64: + f := int64(v) + s.AvgQueueSize = &f + } + + case "avg_response_time": + if err := dec.Decode(&s.AvgResponseTime); err != nil { + return err + } + + case "avg_response_time_ns": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvgResponseTimeNs = &value + case float64: + f := int64(v) + s.AvgResponseTimeNs = &f + } + + case "avg_service_time": + if err := dec.Decode(&s.AvgServiceTime); err != nil { + return err + } + + case "avg_service_time_ns": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvgServiceTimeNs = &value + case float64: + f := int64(v) + s.AvgServiceTimeNs = &f + } + + case "outgoing_searches": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OutgoingSearches = &value + case float64: + f := int64(v) + s.OutgoingSearches = &f + } + + case "rank": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Rank = &o + + } + } + return nil +} + // NewAdaptiveSelection returns a AdaptiveSelection. func NewAdaptiveSelection() *AdaptiveSelection { r := &AdaptiveSelection{} diff --git a/typedapi/types/addaction.go b/typedapi/types/addaction.go old mode 100755 new mode 100644 index 539ca223fe..a2794ec4a0 --- a/typedapi/types/addaction.go +++ b/typedapi/types/addaction.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AddAction type. 
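Several of the regenerated types above and below this hunk (ActivationState, AdaptiveSelection, AggregationBreakdown, AggregationProfileDebug, among others) replace the default struct decoding with a hand-written token loop so that numeric and boolean fields are accepted whether the server sends them as native JSON values or as quoted strings. The following standalone sketch mirrors that pattern only; the lenientStats type and its fields are invented for illustration and are not part of the generated package.

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// lenientStats is a hypothetical type; only the decoding pattern matches the
// generated UnmarshalJSON methods in this patch.
type lenientStats struct {
	DocCount int64 `json:"doc_count"`
	Active   bool  `json:"active"`
}

func (s *lenientStats) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "doc_count":
			// Accept 7 as well as "7".
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string:
				n, err := strconv.ParseInt(v, 10, 64)
				if err != nil {
					return err
				}
				s.DocCount = n
			case float64:
				s.DocCount = int64(v)
			}
		case "active":
			// Accept true as well as "true".
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string:
				b, err := strconv.ParseBool(v)
				if err != nil {
					return err
				}
				s.Active = b
			case bool:
				s.Active = v
			}
		}
	}
	return nil
}

func main() {
	var s lenientStats
	if err := json.Unmarshal([]byte(`{"doc_count":"7","active":"true"}`), &s); err != nil {
		panic(err)
	}
	fmt.Println(s.DocCount, s.Active) // 7 true
}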
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/update_aliases/types.ts#L30-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/update_aliases/types.ts#L30-L44 type AddAction struct { Alias *string `json:"alias,omitempty"` Aliases []string `json:"aliases,omitempty"` @@ -37,6 +47,130 @@ type AddAction struct { SearchRouting *string `json:"search_routing,omitempty"` } +func (s *AddAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alias": + if err := dec.Decode(&s.Alias); err != nil { + return err + } + + case "aliases": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Aliases = append(s.Aliases, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Aliases); err != nil { + return err + } + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "index_routing": + if err := dec.Decode(&s.IndexRouting); err != nil { + return err + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "is_hidden": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsHidden = &value + case bool: + s.IsHidden = &v + } + + case "is_write_index": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsWriteIndex = &value + case bool: + s.IsWriteIndex = &v + } + + case "must_exist": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MustExist = &value + case bool: + s.MustExist = &v + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "search_routing": + if err := dec.Decode(&s.SearchRouting); err != nil { + return err + } + + } + } + return nil +} + // NewAddAction returns a AddAction. func NewAddAction() *AddAction { r := &AddAction{} diff --git a/typedapi/types/adjacencymatrixaggregate.go b/typedapi/types/adjacencymatrixaggregate.go old mode 100755 new mode 100644 index 671d742273..48a5266a47 --- a/typedapi/types/adjacencymatrixaggregate.go +++ b/typedapi/types/adjacencymatrixaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // AdjacencyMatrixAggregate type. 
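The AddAction hunk just above also normalizes fields that may arrive either as a single string or as an array ("aliases", "indices") by peeking at whether the raw message starts with '['. Below is a reduced standalone sketch of that normalization; aliasAction is an invented stand-in, not the generated struct.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type aliasAction struct {
	Aliases []string `json:"aliases,omitempty"`
}

func (a *aliasAction) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["aliases"]; ok {
		if bytes.HasPrefix(msg, []byte("[")) {
			// Already an array: decode it directly.
			if err := json.Unmarshal(msg, &a.Aliases); err != nil {
				return err
			}
		} else {
			// A single string: wrap it into a one-element slice.
			var one string
			if err := json.Unmarshal(msg, &one); err != nil {
				return err
			}
			a.Aliases = append(a.Aliases, one)
		}
	}
	return nil
}

func main() {
	var a, b aliasAction
	_ = json.Unmarshal([]byte(`{"aliases":"logs-read"}`), &a)
	_ = json.Unmarshal([]byte(`{"aliases":["logs-read","logs-write"]}`), &b)
	fmt.Println(a.Aliases, b.Aliases) // [logs-read] [logs-read logs-write]
}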
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L572-L574 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L573-L575 type AdjacencyMatrixAggregate struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *AdjacencyMatrixAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *AdjacencyMatrixAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]AdjacencyMatrixBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []AdjacencyMatrixBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/adjacencymatrixaggregation.go b/typedapi/types/adjacencymatrixaggregation.go old mode 100755 new mode 100644 index 4ea8dbeefe..722f1d4553 --- a/typedapi/types/adjacencymatrixaggregation.go +++ b/typedapi/types/adjacencymatrixaggregation.go @@ -16,21 +16,66 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // AdjacencyMatrixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L48-L50 type AdjacencyMatrixAggregation struct { - Filters map[string]Query `json:"filters,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Filters map[string]Query `json:"filters,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *AdjacencyMatrixAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filters": + if s.Filters == nil { + s.Filters = make(map[string]Query, 0) + } + if err := dec.Decode(&s.Filters); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil } // NewAdjacencyMatrixAggregation returns a AdjacencyMatrixAggregation. diff --git a/typedapi/types/adjacencymatrixbucket.go b/typedapi/types/adjacencymatrixbucket.go old mode 100755 new mode 100644 index f47c9c672f..56af736286 --- a/typedapi/types/adjacencymatrixbucket.go +++ b/typedapi/types/adjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // AdjacencyMatrixBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L576-L578 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L577-L579 type AdjacencyMatrixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -42,6 +44,7 @@ type AdjacencyMatrixBucket struct { } func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,456 +58,540 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := 
NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { + case "key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Key = o - case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err 
:= dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err 
!= nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case 
"box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } } } @@ -531,6 +618,7 @@ func (s AdjacencyMatrixBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/aggregate.go b/typedapi/types/aggregate.go old mode 100755 new mode 100644 index 4cb23337b4..e3a22d27c6 --- a/typedapi/types/aggregate.go +++ b/typedapi/types/aggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -79,6 +79,7 @@ package types // SignificantStringTermsAggregate // UnmappedSignificantTermsAggregate // CompositeAggregate +// FrequentItemSetsAggregate // ScriptedMetricAggregate // TopHitsAggregate // InferenceAggregate @@ -91,5 +92,5 @@ package types // MatrixStatsAggregate // GeoLineAggregate // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L38-L122 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L38-L123 type Aggregate interface{} diff --git a/typedapi/types/aggregatemetricdoubleproperty.go b/typedapi/types/aggregatemetricdoubleproperty.go old mode 100755 new mode 100644 index 4c4126ec1d..35ad27afec --- a/typedapi/types/aggregatemetricdoubleproperty.go +++ b/typedapi/types/aggregatemetricdoubleproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // AggregateMetricDoubleProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/complex.ts#L59-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/complex.ts#L59-L64 type AggregateMetricDoubleProperty struct { DefaultMetric string `json:"default_metric"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -48,6 +50,7 @@ type AggregateMetricDoubleProperty struct { } func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +65,12 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { switch t { case "default_metric": - if err := dec.Decode(&s.DefaultMetric); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.DefaultMetric = o case "dynamic": if err := dec.Decode(&s.Dynamic); err != nil { @@ -72,6 +78,9 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -359,18 +368,32 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -381,6 +404,9 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -668,7 +694,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/aggregateorder.go b/typedapi/types/aggregateorder.go old mode 100755 new mode 100644 index 4b05f8fe84..bf38860b8f --- a/typedapi/types/aggregateorder.go +++ b/typedapi/types/aggregateorder.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]sortorder.SortOrder // []map[string]sortorder.SortOrder // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L403-L405 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L403-L405 type AggregateOrder interface{} diff --git a/typedapi/types/aggregateoutput.go b/typedapi/types/aggregateoutput.go old mode 100755 new mode 100644 index b58a1364ac..fae4473745 --- a/typedapi/types/aggregateoutput.go +++ b/typedapi/types/aggregateoutput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // AggregateOutput type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L101-L106 type AggregateOutput struct { Exponent *Weights `json:"exponent,omitempty"` LogisticRegression *Weights `json:"logistic_regression,omitempty"` diff --git a/typedapi/types/aggregation.go b/typedapi/types/aggregation.go old mode 100755 new mode 100644 index dc4219c23e..4f912fee29 --- a/typedapi/types/aggregation.go +++ b/typedapi/types/aggregation.go @@ -16,20 +16,57 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // Aggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregation.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregation.ts#L22-L25 type Aggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *Aggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil } // NewAggregation returns a Aggregation. 
diff --git a/typedapi/types/aggregationbreakdown.go b/typedapi/types/aggregationbreakdown.go old mode 100755 new mode 100644 index b38cd9b173..4ac40baa66 --- a/typedapi/types/aggregationbreakdown.go +++ b/typedapi/types/aggregationbreakdown.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AggregationBreakdown type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L23-L36 type AggregationBreakdown struct { BuildAggregation int64 `json:"build_aggregation"` BuildAggregationCount int64 `json:"build_aggregation_count"` @@ -38,6 +48,206 @@ type AggregationBreakdown struct { ReduceCount int64 `json:"reduce_count"` } +func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "build_aggregation": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BuildAggregation = value + case float64: + f := int64(v) + s.BuildAggregation = f + } + + case "build_aggregation_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BuildAggregationCount = value + case float64: + f := int64(v) + s.BuildAggregationCount = f + } + + case "build_leaf_collector": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BuildLeafCollector = value + case float64: + f := int64(v) + s.BuildLeafCollector = f + } + + case "build_leaf_collector_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BuildLeafCollectorCount = value + case float64: + f := int64(v) + s.BuildLeafCollectorCount = f + } + + case "collect": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Collect = value + case float64: + f := int64(v) + s.Collect = f + } + + case "collect_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CollectCount = value + case float64: + f := int64(v) + s.CollectCount = f + } + + case "initialize": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Initialize = value + case float64: + f := int64(v) + s.Initialize = f + } + + case "initialize_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + 
case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InitializeCount = value + case float64: + f := int64(v) + s.InitializeCount = f + } + + case "post_collection": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PostCollection = &value + case float64: + f := int64(v) + s.PostCollection = &f + } + + case "post_collection_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PostCollectionCount = &value + case float64: + f := int64(v) + s.PostCollectionCount = &f + } + + case "reduce": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Reduce = value + case float64: + f := int64(v) + s.Reduce = f + } + + case "reduce_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReduceCount = value + case float64: + f := int64(v) + s.ReduceCount = f + } + + } + } + return nil +} + // NewAggregationBreakdown returns a AggregationBreakdown. func NewAggregationBreakdown() *AggregationBreakdown { r := &AggregationBreakdown{} diff --git a/typedapi/types/aggregationprofile.go b/typedapi/types/aggregationprofile.go old mode 100755 new mode 100644 index 598fe48e2c..59ec5af040 --- a/typedapi/types/aggregationprofile.go +++ b/typedapi/types/aggregationprofile.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AggregationProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L77-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L77-L84 type AggregationProfile struct { Breakdown AggregationBreakdown `json:"breakdown"` Children []AggregationProfile `json:"children,omitempty"` @@ -32,6 +40,62 @@ type AggregationProfile struct { Type string `json:"type"` } +func (s *AggregationProfile) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "breakdown": + if err := dec.Decode(&s.Breakdown); err != nil { + return err + } + + case "children": + if err := dec.Decode(&s.Children); err != nil { + return err + } + + case "debug": + if err := dec.Decode(&s.Debug); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "time_in_nanos": + if err := dec.Decode(&s.TimeInNanos); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewAggregationProfile returns a AggregationProfile. func NewAggregationProfile() *AggregationProfile { r := &AggregationProfile{} diff --git a/typedapi/types/aggregationprofiledebug.go b/typedapi/types/aggregationprofiledebug.go old mode 100755 new mode 100644 index a48596370d..77885769da --- a/typedapi/types/aggregationprofiledebug.go +++ b/typedapi/types/aggregationprofiledebug.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AggregationProfileDebug type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L39-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L39-L68 type AggregationProfileDebug struct { BuiltBuckets *int `json:"built_buckets,omitempty"` CharsFetched *int `json:"chars_fetched,omitempty"` @@ -54,6 +64,407 @@ type AggregationProfileDebug struct { ValuesFetched *int `json:"values_fetched,omitempty"` } +func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "built_buckets": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.BuiltBuckets = &value + case float64: + f := int(v) + s.BuiltBuckets = &f + } + + case "chars_fetched": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CharsFetched = &value + case float64: + f := int(v) + s.CharsFetched = &f + } + + case "collect_analyzed_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CollectAnalyzedCount = &value + case float64: + f := int(v) + s.CollectAnalyzedCount = &f + } + + case "collect_analyzed_ns": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CollectAnalyzedNs = &value + case float64: + f := int(v) + s.CollectAnalyzedNs = &f + } + + case "collection_strategy": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CollectionStrategy = &o + + case "deferred_aggregators": + if err := dec.Decode(&s.DeferredAggregators); err != nil { + return err + } + + case "delegate": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Delegate = &o + + case "delegate_debug": + if err := dec.Decode(&s.DelegateDebug); err != nil { + return err + } + + case "empty_collectors_used": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.EmptyCollectorsUsed = &value + case float64: + f := int(v) + s.EmptyCollectorsUsed = &f + } + + case "extract_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ExtractCount = &value + case float64: + f := int(v) + s.ExtractCount = &f + } + + case "extract_ns": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ExtractNs = &value + case float64: + f := int(v) + s.ExtractNs = &f + } + + case "filters": + if err := dec.Decode(&s.Filters); err != nil { + return err + } + + case "has_filter": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HasFilter = &value + case bool: + s.HasFilter = &v + } + + case 
"map_reducer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MapReducer = &o + + case "numeric_collectors_used": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumericCollectorsUsed = &value + case float64: + f := int(v) + s.NumericCollectorsUsed = &f + } + + case "ordinals_collectors_overhead_too_high": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OrdinalsCollectorsOverheadTooHigh = &value + case float64: + f := int(v) + s.OrdinalsCollectorsOverheadTooHigh = &f + } + + case "ordinals_collectors_used": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OrdinalsCollectorsUsed = &value + case float64: + f := int(v) + s.OrdinalsCollectorsUsed = &f + } + + case "result_strategy": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultStrategy = &o + + case "segments_collected": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsCollected = &value + case float64: + f := int(v) + s.SegmentsCollected = &f + } + + case "segments_counted": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsCounted = &value + case float64: + f := int(v) + s.SegmentsCounted = &f + } + + case "segments_with_deleted_docs": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsWithDeletedDocs = &value + case float64: + f := int(v) + s.SegmentsWithDeletedDocs = &f + } + + case "segments_with_doc_count_field": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsWithDocCountField = &value + case float64: + f := int(v) + s.SegmentsWithDocCountField = &f + } + + case "segments_with_multi_valued_ords": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsWithMultiValuedOrds = &value + case float64: + f := int(v) + s.SegmentsWithMultiValuedOrds = &f + } + + case "segments_with_single_valued_ords": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsWithSingleValuedOrds = &value + case float64: + f := int(v) + s.SegmentsWithSingleValuedOrds = &f + } + + case "string_hashing_collectors_used": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.StringHashingCollectorsUsed = &value + case float64: + f := int(v) + s.StringHashingCollectorsUsed = &f + } + + case "surviving_buckets": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SurvivingBuckets = &value + case float64: + f := int(v) + s.SurvivingBuckets = &f + } + + case 
"total_buckets": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalBuckets = &value + case float64: + f := int(v) + s.TotalBuckets = &f + } + + case "values_fetched": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ValuesFetched = &value + case float64: + f := int(v) + s.ValuesFetched = &f + } + + } + } + return nil +} + // NewAggregationProfileDebug returns a AggregationProfileDebug. func NewAggregationProfileDebug() *AggregationProfileDebug { r := &AggregationProfileDebug{} diff --git a/typedapi/types/aggregationprofiledelegatedebugfilter.go b/typedapi/types/aggregationprofiledelegatedebugfilter.go old mode 100755 new mode 100644 index 49cc0a90ac..addce0b1ff --- a/typedapi/types/aggregationprofiledelegatedebugfilter.go +++ b/typedapi/types/aggregationprofiledelegatedebugfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AggregationProfileDelegateDebugFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L70-L75 type AggregationProfileDelegateDebugFilter struct { Query *string `json:"query,omitempty"` ResultsFromMetadata *int `json:"results_from_metadata,omitempty"` @@ -30,6 +40,74 @@ type AggregationProfileDelegateDebugFilter struct { SpecializedFor *string `json:"specialized_for,omitempty"` } +func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = &o + + case "results_from_metadata": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ResultsFromMetadata = &value + case float64: + f := int(v) + s.ResultsFromMetadata = &f + } + + case "segments_counted_in_constant_time": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SegmentsCountedInConstantTime = &value + case float64: + f := int(v) + s.SegmentsCountedInConstantTime = &f + } + + case "specialized_for": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SpecializedFor = &o + + } + } + return nil +} + // NewAggregationProfileDelegateDebugFilter returns a AggregationProfileDelegateDebugFilter. 
func NewAggregationProfileDelegateDebugFilter() *AggregationProfileDelegateDebugFilter { r := &AggregationProfileDelegateDebugFilter{} diff --git a/typedapi/types/aggregationrange.go b/typedapi/types/aggregationrange.go old mode 100755 new mode 100644 index 8e1473da92..f3869e199a --- a/typedapi/types/aggregationrange.go +++ b/typedapi/types/aggregationrange.go @@ -16,19 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // AggregationRange type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L298-L302 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L298-L302 type AggregationRange struct { From string `json:"from,omitempty"` Key *string `json:"key,omitempty"` To string `json:"to,omitempty"` } +func (s *AggregationRange) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "from": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.From = o + + case "key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Key = &o + + case "to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.To = o + + } + } + return nil +} + // NewAggregationRange returns a AggregationRange. func NewAggregationRange() *AggregationRange { r := &AggregationRange{} diff --git a/typedapi/types/aggregations.go b/typedapi/types/aggregations.go old mode 100755 new mode 100644 index 0e0cde8a06..ab40ac38f1 --- a/typedapi/types/aggregations.go +++ b/typedapi/types/aggregations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,7 +30,7 @@ import ( // Aggregations type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/AggregationContainer.ts#L105-L209 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/AggregationContainer.ts#L106-L211 type Aggregations struct { AdjacencyMatrix *AdjacencyMatrixAggregation `json:"adjacency_matrix,omitempty"` // Aggregations Sub-aggregations for this aggregation. Only applies to bucket aggregations. 
@@ -58,6 +58,7 @@ type Aggregations struct { ExtendedStatsBucket *ExtendedStatsBucketAggregation `json:"extended_stats_bucket,omitempty"` Filter *Query `json:"filter,omitempty"` Filters *FiltersAggregation `json:"filters,omitempty"` + FrequentItemSets *FrequentItemSetsAggregation `json:"frequent_item_sets,omitempty"` GeoBounds *GeoBoundsAggregation `json:"geo_bounds,omitempty"` GeoCentroid *GeoCentroidAggregation `json:"geo_centroid,omitempty"` GeoDistance *GeoDistanceAggregation `json:"geo_distance,omitempty"` @@ -75,7 +76,7 @@ type Aggregations struct { Max *MaxAggregation `json:"max,omitempty"` MaxBucket *MaxBucketAggregation `json:"max_bucket,omitempty"` MedianAbsoluteDeviation *MedianAbsoluteDeviationAggregation `json:"median_absolute_deviation,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` Min *MinAggregation `json:"min,omitempty"` MinBucket *MinBucketAggregation `json:"min_bucket,omitempty"` Missing *MissingAggregation `json:"missing,omitempty"` @@ -113,6 +114,7 @@ type Aggregations struct { } func (s *Aggregations) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -132,6 +134,9 @@ func (s *Aggregations) UnmarshalJSON(data []byte) error { } case "aggregations", "aggs": + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregations, 0) + } if err := dec.Decode(&s.Aggregations); err != nil { return err } @@ -251,6 +256,11 @@ func (s *Aggregations) UnmarshalJSON(data []byte) error { return err } + case "frequent_item_sets": + if err := dec.Decode(&s.FrequentItemSets); err != nil { + return err + } + case "geo_bounds": if err := dec.Decode(&s.GeoBounds); err != nil { return err @@ -370,36 +380,36 @@ func (s *Aggregations) UnmarshalJSON(data []byte) error { case "linear": o := NewLinearMovingAverageAggregation() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.MovingAvg = *o case "simple": o := NewSimpleMovingAverageAggregation() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.MovingAvg = *o case "ewma": o := NewEwmaMovingAverageAggregation() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.MovingAvg = *o case "holt": o := NewHoltMovingAverageAggregation() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.MovingAvg = *o case "holt_winters": o := NewHoltWintersMovingAverageAggregation() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.MovingAvg = *o default: - if err := dec.Decode(&s.MovingAvg); err != nil { + if err := localDec.Decode(&s.MovingAvg); err != nil { return err } } diff --git a/typedapi/types/alias.go b/typedapi/types/alias.go old mode 100755 new mode 100644 index 86ade6152d..229b3bb974 --- a/typedapi/types/alias.go +++ b/typedapi/types/alias.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Alias type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/Alias.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/Alias.ts#L23-L30 type Alias struct { Filter *Query `json:"filter,omitempty"` IndexRouting *string `json:"index_routing,omitempty"` @@ -32,6 +42,74 @@ type Alias struct { SearchRouting *string `json:"search_routing,omitempty"` } +func (s *Alias) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "index_routing": + if err := dec.Decode(&s.IndexRouting); err != nil { + return err + } + + case "is_hidden": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsHidden = &value + case bool: + s.IsHidden = &v + } + + case "is_write_index": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsWriteIndex = &value + case bool: + s.IsWriteIndex = &v + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "search_routing": + if err := dec.Decode(&s.SearchRouting); err != nil { + return err + } + + } + } + return nil +} + // NewAlias returns a Alias. func NewAlias() *Alias { r := &Alias{} diff --git a/typedapi/types/aliasdefinition.go b/typedapi/types/aliasdefinition.go old mode 100755 new mode 100644 index fb46026e75..c916a1b235 --- a/typedapi/types/aliasdefinition.go +++ b/typedapi/types/aliasdefinition.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AliasDefinition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/AliasDefinition.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/AliasDefinition.ts#L22-L30 type AliasDefinition struct { Filter *Query `json:"filter,omitempty"` IndexRouting *string `json:"index_routing,omitempty"` @@ -32,6 +42,83 @@ type AliasDefinition struct { SearchRouting *string `json:"search_routing,omitempty"` } +func (s *AliasDefinition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "index_routing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexRouting = &o + + case "is_hidden": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsHidden = &value + case bool: + s.IsHidden = &v + } + + case "is_write_index": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsWriteIndex = &value + case bool: + s.IsWriteIndex = &v + } + + case "routing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Routing = &o + + case "search_routing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchRouting = &o + + } + } + return nil +} + // NewAliasDefinition returns a AliasDefinition. func NewAliasDefinition() *AliasDefinition { r := &AliasDefinition{} diff --git a/typedapi/types/aliasesrecord.go b/typedapi/types/aliasesrecord.go old mode 100755 new mode 100644 index d8e09e9ace..8c67f61018 --- a/typedapi/types/aliasesrecord.go +++ b/typedapi/types/aliasesrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AliasesRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/aliases/types.ts#L22-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/aliases/types.ts#L22-L53 type AliasesRecord struct { // Alias alias name Alias *string `json:"alias,omitempty"` @@ -38,6 +46,71 @@ type AliasesRecord struct { RoutingSearch *string `json:"routing.search,omitempty"` } +func (s *AliasesRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alias", "a": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Alias = &o + + case "filter", "f", "fi": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Filter = &o + + case "index", "i", "idx": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "is_write_index", "w", "isWriteIndex": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IsWriteIndex = &o + + case "routing.index", "ri", "routingIndex": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RoutingIndex = &o + + case "routing.search", "rs", "routingSearch": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RoutingSearch = &o + + } + } + return nil +} + // NewAliasesRecord returns a AliasesRecord. func NewAliasesRecord() *AliasesRecord { r := &AliasesRecord{} diff --git a/typedapi/types/allfield.go b/typedapi/types/allfield.go old mode 100755 new mode 100644 index b26960749b..5ecb1a3c1c --- a/typedapi/types/allfield.go +++ b/typedapi/types/allfield.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AllField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L29-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L29-L40 type AllField struct { Analyzer string `json:"analyzer"` Enabled bool `json:"enabled"` @@ -36,6 +46,148 @@ type AllField struct { StoreTermVectors bool `json:"store_term_vectors"` } +func (s *AllField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = o + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "omit_norms": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OmitNorms = value + case bool: + s.OmitNorms = v + } + + case "search_analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchAnalyzer = o + + case "similarity": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Similarity = o + + case "store": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = value + case bool: + s.Store = v + } + + case "store_term_vector_offsets": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StoreTermVectorOffsets = value + case bool: + s.StoreTermVectorOffsets = v + } + + case "store_term_vector_payloads": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StoreTermVectorPayloads = value + case bool: + s.StoreTermVectorPayloads = v + } + + case "store_term_vector_positions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StoreTermVectorPositions = value + case bool: + s.StoreTermVectorPositions = v + } + + case "store_term_vectors": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StoreTermVectors = value + case bool: + s.StoreTermVectors = v + } + + } + } + return nil +} + // NewAllField returns a AllField. func NewAllField() *AllField { r := &AllField{} diff --git a/typedapi/types/allocationdecision.go b/typedapi/types/allocationdecision.go old mode 100755 new mode 100644 index 2e1fd63bfd..0a76bad214 --- a/typedapi/types/allocationdecision.go +++ b/typedapi/types/allocationdecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // AllocationDecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L26-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L26-L30 type AllocationDecision struct { Decider string `json:"decider"` Decision allocationexplaindecision.AllocationExplainDecision `json:"decision"` diff --git a/typedapi/types/allocationrecord.go b/typedapi/types/allocationrecord.go old mode 100755 new mode 100644 index 7fa7b4accd..1b7203e311 --- a/typedapi/types/allocationrecord.go +++ b/typedapi/types/allocationrecord.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // AllocationRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/allocation/types.ts#L24-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/allocation/types.ts#L24-L69 type AllocationRecord struct { // DiskAvail disk available DiskAvail ByteSize `json:"disk.avail,omitempty"` @@ -44,6 +51,77 @@ type AllocationRecord struct { Shards *string `json:"shards,omitempty"` } +func (s *AllocationRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "disk.avail", "da", "diskAvail": + if err := dec.Decode(&s.DiskAvail); err != nil { + return err + } + + case "disk.indices", "di", "diskIndices": + if err := dec.Decode(&s.DiskIndices); err != nil { + return err + } + + case "disk.percent", "dp", "diskPercent": + if err := dec.Decode(&s.DiskPercent); err != nil { + return err + } + + case "disk.total", "dt", "diskTotal": + if err := dec.Decode(&s.DiskTotal); err != nil { + return err + } + + case "disk.used", "du", "diskUsed": + if err := dec.Decode(&s.DiskUsed); err != nil { + return err + } + + case "host", "h": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "node", "n": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = &o + + case "shards", "s": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Shards = &o + + } + } + return nil +} + // NewAllocationRecord returns a AllocationRecord. 
func NewAllocationRecord() *AllocationRecord { r := &AllocationRecord{} diff --git a/typedapi/types/allocationstore.go b/typedapi/types/allocationstore.go old mode 100755 new mode 100644 index 6e71d40306..10da7cb9cc --- a/typedapi/types/allocationstore.go +++ b/typedapi/types/allocationstore.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AllocationStore type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L39-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L39-L46 type AllocationStore struct { AllocationId string `json:"allocation_id"` Found bool `json:"found"` @@ -32,6 +42,99 @@ type AllocationStore struct { StoreException string `json:"store_exception"` } +func (s *AllocationStore) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocation_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AllocationId = o + + case "found": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Found = value + case bool: + s.Found = v + } + + case "in_sync": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.InSync = value + case bool: + s.InSync = v + } + + case "matching_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MatchingSizeInBytes = value + case float64: + f := int64(v) + s.MatchingSizeInBytes = f + } + + case "matching_sync_id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MatchingSyncId = value + case bool: + s.MatchingSyncId = v + } + + case "store_exception": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StoreException = o + + } + } + return nil +} + // NewAllocationStore returns a AllocationStore. func NewAllocationStore() *AllocationStore { r := &AllocationStore{} diff --git a/typedapi/types/alwayscondition.go b/typedapi/types/alwayscondition.go old mode 100755 new mode 100644 index 0aa676836b..3d21621b55 --- a/typedapi/types/alwayscondition.go +++ b/typedapi/types/alwayscondition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // AlwaysCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L25-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L25-L25 type AlwaysCondition struct { } diff --git a/typedapi/types/analysisconfig.go b/typedapi/types/analysisconfig.go old mode 100755 new mode 100644 index a6184e8ac3..9bd08473fc --- a/typedapi/types/analysisconfig.go +++ b/typedapi/types/analysisconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,12 +25,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // AnalysisConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L29-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L29-L77 type AnalysisConfig struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. This value should be either a whole number of days or @@ -113,6 +115,7 @@ type AnalysisConfig struct { } func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -132,6 +135,7 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { } case "categorization_analyzer": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -181,8 +185,17 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { } case "multivariate_by_fields": - if err := dec.Decode(&s.MultivariateByFields); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MultivariateByFields = &value + case bool: + s.MultivariateByFields = &v } case "per_partition_categorization": diff --git a/typedapi/types/analysisconfigread.go b/typedapi/types/analysisconfigread.go old mode 100755 new mode 100644 index d640887d9f..a8d8fed7ac --- a/typedapi/types/analysisconfigread.go +++ b/typedapi/types/analysisconfigread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,12 +25,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // AnalysisConfigRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L79-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L79-L91 type AnalysisConfigRead struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. 
This value should be either a whole number of days or @@ -113,6 +115,7 @@ type AnalysisConfigRead struct { } func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -132,6 +135,7 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { } case "categorization_analyzer": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -181,8 +185,17 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { } case "multivariate_by_fields": - if err := dec.Decode(&s.MultivariateByFields); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MultivariateByFields = &value + case bool: + s.MultivariateByFields = &v } case "per_partition_categorization": diff --git a/typedapi/types/analysislimits.go b/typedapi/types/analysislimits.go old mode 100755 new mode 100644 index a1f1f58a0e..193b983289 --- a/typedapi/types/analysislimits.go +++ b/typedapi/types/analysislimits.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AnalysisLimits type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L104-L115 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L104-L115 type AnalysisLimits struct { // CategorizationExamplesLimit The maximum number of examples stored per category in memory and in the // results data store. If you increase this value, more examples are available, @@ -49,6 +59,49 @@ type AnalysisLimits struct { ModelMemoryLimit *string `json:"model_memory_limit,omitempty"` } +func (s *AnalysisLimits) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "categorization_examples_limit": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CategorizationExamplesLimit = &value + case float64: + f := int64(v) + s.CategorizationExamplesLimit = &f + } + + case "model_memory_limit": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelMemoryLimit = &o + + } + } + return nil +} + // NewAnalysisLimits returns a AnalysisLimits. func NewAnalysisLimits() *AnalysisLimits { r := &AnalysisLimits{} diff --git a/typedapi/types/analysismemorylimit.go b/typedapi/types/analysismemorylimit.go old mode 100755 new mode 100644 index 9d2be59c3d..198da8f29f --- a/typedapi/types/analysismemorylimit.go +++ b/typedapi/types/analysismemorylimit.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // AnalysisMemoryLimit type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L117-L122 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L117-L122 type AnalysisMemoryLimit struct { // ModelMemoryLimit Limits can be applied for the resources required to hold the mathematical // models in memory. These limits are approximate and can be set per job. They diff --git a/typedapi/types/analytics.go b/typedapi/types/analytics.go old mode 100755 new mode 100644 index b67d6ba641..31e1f7a91a --- a/typedapi/types/analytics.go +++ b/typedapi/types/analytics.go @@ -16,19 +16,82 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Analytics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L324-L326 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L324-L326 type Analytics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` Stats AnalyticsStatistics `json:"stats"` } +func (s *Analytics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "stats": + if err := dec.Decode(&s.Stats); err != nil { + return err + } + + } + } + return nil +} + // NewAnalytics returns a Analytics. func NewAnalytics() *Analytics { r := &Analytics{} diff --git a/typedapi/types/analyticsstatistics.go b/typedapi/types/analyticsstatistics.go old mode 100755 new mode 100644 index be7866d91e..e254d09578 --- a/typedapi/types/analyticsstatistics.go +++ b/typedapi/types/analyticsstatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AnalyticsStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L61-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L61-L71 type AnalyticsStatistics struct { BoxplotUsage int64 `json:"boxplot_usage"` CumulativeCardinalityUsage int64 `json:"cumulative_cardinality_usage"` @@ -35,6 +45,161 @@ type AnalyticsStatistics struct { TopMetricsUsage int64 `json:"top_metrics_usage"` } +func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boxplot_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BoxplotUsage = value + case float64: + f := int64(v) + s.BoxplotUsage = f + } + + case "cumulative_cardinality_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CumulativeCardinalityUsage = value + case float64: + f := int64(v) + s.CumulativeCardinalityUsage = f + } + + case "moving_percentiles_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MovingPercentilesUsage = value + case float64: + f := int64(v) + s.MovingPercentilesUsage = f + } + + case "multi_terms_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MultiTermsUsage = &value + case float64: + f := int64(v) + s.MultiTermsUsage = &f + } + + case "normalize_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NormalizeUsage = value + case float64: + f := int64(v) + s.NormalizeUsage = f + } + + case "rate_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RateUsage = value + case float64: + f := int64(v) + s.RateUsage = f + } + + case "string_stats_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.StringStatsUsage = value + case float64: + f := int64(v) + s.StringStatsUsage = f + } + + case "t_test_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TTestUsage = value + case float64: + f := int64(v) + s.TTestUsage = f + } + + case "top_metrics_usage": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TopMetricsUsage = value + case float64: + f := int64(v) + s.TopMetricsUsage = f + } + + } + } + return nil +} + // NewAnalyticsStatistics returns a AnalyticsStatistics. 
func NewAnalyticsStatistics() *AnalyticsStatistics { r := &AnalyticsStatistics{} diff --git a/typedapi/types/analyzedetail.go b/typedapi/types/analyzedetail.go old mode 100755 new mode 100644 index a0d4150c99..4fb3070d6d --- a/typedapi/types/analyzedetail.go +++ b/typedapi/types/analyzedetail.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AnalyzeDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L24-L30 type AnalyzeDetail struct { Analyzer *AnalyzerDetail `json:"analyzer,omitempty"` Charfilters []CharFilterDetail `json:"charfilters,omitempty"` @@ -31,6 +41,60 @@ type AnalyzeDetail struct { Tokenizer *TokenDetail `json:"tokenizer,omitempty"` } +func (s *AnalyzeDetail) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + if err := dec.Decode(&s.Analyzer); err != nil { + return err + } + + case "charfilters": + if err := dec.Decode(&s.Charfilters); err != nil { + return err + } + + case "custom_analyzer": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CustomAnalyzer = value + case bool: + s.CustomAnalyzer = v + } + + case "tokenfilters": + if err := dec.Decode(&s.Tokenfilters); err != nil { + return err + } + + case "tokenizer": + if err := dec.Decode(&s.Tokenizer); err != nil { + return err + } + + } + } + return nil +} + // NewAnalyzeDetail returns a AnalyzeDetail. func NewAnalyzeDetail() *AnalyzeDetail { r := &AnalyzeDetail{} diff --git a/typedapi/types/analyzer.go b/typedapi/types/analyzer.go old mode 100755 new mode 100644 index 282e7b8d1f..f9e0b0148e --- a/typedapi/types/analyzer.go +++ b/typedapi/types/analyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -37,5 +37,5 @@ package types // SnowballAnalyzer // DutchAnalyzer // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L113-L131 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L113-L131 type Analyzer interface{} diff --git a/typedapi/types/analyzerdetail.go b/typedapi/types/analyzerdetail.go old mode 100755 new mode 100644 index 255884fbc3..5e59dc26a8 --- a/typedapi/types/analyzerdetail.go +++ b/typedapi/types/analyzerdetail.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // AnalyzerDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L32-L35 type AnalyzerDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` diff --git a/typedapi/types/analyzetoken.go b/typedapi/types/analyzetoken.go old mode 100755 new mode 100644 index 4aebe4ec71..771a020afc --- a/typedapi/types/analyzetoken.go +++ b/typedapi/types/analyzetoken.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AnalyzeToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L37-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L37-L44 type AnalyzeToken struct { EndOffset int64 `json:"end_offset"` Position int64 `json:"position"` @@ -32,6 +42,102 @@ type AnalyzeToken struct { Type string `json:"type"` } +func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "end_offset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EndOffset = value + case float64: + f := int64(v) + s.EndOffset = f + } + + case "position": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Position = value + case float64: + f := int64(v) + s.Position = f + } + + case "positionLength": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PositionLength = &value + case float64: + f := int64(v) + s.PositionLength = &f + } + + case "start_offset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.StartOffset = value + case float64: + f := int64(v) + s.StartOffset = f + } + + case "token": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Token = o + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewAnalyzeToken returns a AnalyzeToken. 
func NewAnalyzeToken() *AnalyzeToken { r := &AnalyzeToken{} diff --git a/typedapi/types/anomaly.go b/typedapi/types/anomaly.go old mode 100755 new mode 100644 index 8424c8c5ff..a78e21da8e --- a/typedapi/types/anomaly.go +++ b/typedapi/types/anomaly.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Anomaly type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Anomaly.ts#L24-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Anomaly.ts#L24-L121 type Anomaly struct { // Actual The actual value for the bucket. Actual []Float64 `json:"actual,omitempty"` @@ -103,6 +113,232 @@ type Anomaly struct { Typical []Float64 `json:"typical,omitempty"` } +func (s *Anomaly) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actual": + if err := dec.Decode(&s.Actual); err != nil { + return err + } + + case "anomaly_score_explanation": + if err := dec.Decode(&s.AnomalyScoreExplanation); err != nil { + return err + } + + case "bucket_span": + if err := dec.Decode(&s.BucketSpan); err != nil { + return err + } + + case "by_field_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ByFieldName = &o + + case "by_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ByFieldValue = &o + + case "causes": + if err := dec.Decode(&s.Causes); err != nil { + return err + } + + case "detector_index": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DetectorIndex = value + case float64: + f := int(v) + s.DetectorIndex = f + } + + case "field_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FieldName = &o + + case "function": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Function = &o + + case "function_description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FunctionDescription = &o + + case "geo_results": + if err := dec.Decode(&s.GeoResults); err != nil { + return err + } + + case "influencers": + if err := dec.Decode(&s.Influencers); err != nil { + return err + } + + case "initial_record_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.InitialRecordScore = f + case float64: + f := Float64(v) + s.InitialRecordScore = f + } + + case "is_interim": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsInterim = value + case bool: + s.IsInterim = v + } + + case 
"job_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.JobId = o + + case "over_field_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.OverFieldName = &o + + case "over_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.OverFieldValue = &o + + case "partition_field_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PartitionFieldName = &o + + case "partition_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PartitionFieldValue = &o + + case "probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Probability = f + case float64: + f := Float64(v) + s.Probability = f + } + + case "record_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RecordScore = f + case float64: + f := Float64(v) + s.RecordScore = f + } + + case "result_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "typical": + if err := dec.Decode(&s.Typical); err != nil { + return err + } + + } + } + return nil +} + // NewAnomaly returns a Anomaly. func NewAnomaly() *Anomaly { r := &Anomaly{} diff --git a/typedapi/types/anomalycause.go b/typedapi/types/anomalycause.go old mode 100755 new mode 100644 index c95a8a344f..3ed9b27a9f --- a/typedapi/types/anomalycause.go +++ b/typedapi/types/anomalycause.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AnomalyCause type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Anomaly.ts#L123-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Anomaly.ts#L123-L138 type AnomalyCause struct { Actual []Float64 `json:"actual"` ByFieldName string `json:"by_field_name"` @@ -40,6 +50,128 @@ type AnomalyCause struct { Typical []Float64 `json:"typical"` } +func (s *AnomalyCause) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actual": + if err := dec.Decode(&s.Actual); err != nil { + return err + } + + case "by_field_name": + if err := dec.Decode(&s.ByFieldName); err != nil { + return err + } + + case "by_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ByFieldValue = o + + case "correlated_by_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CorrelatedByFieldValue = o + + case "field_name": + if err := dec.Decode(&s.FieldName); err != nil { + return err + } + + case "function": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Function = o + + case "function_description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FunctionDescription = o + + case "influencers": + if err := dec.Decode(&s.Influencers); err != nil { + return err + } + + case "over_field_name": + if err := dec.Decode(&s.OverFieldName); err != nil { + return err + } + + case "over_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.OverFieldValue = o + + case "partition_field_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PartitionFieldName = o + + case "partition_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PartitionFieldValue = o + + case "probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Probability = f + case float64: + f := Float64(v) + s.Probability = f + } + + case "typical": + if err := dec.Decode(&s.Typical); err != nil { + return err + } + + } + } + return nil +} + // NewAnomalyCause returns a AnomalyCause. func NewAnomalyCause() *AnomalyCause { r := &AnomalyCause{} diff --git a/typedapi/types/anomalydetectors.go b/typedapi/types/anomalydetectors.go old mode 100755 new mode 100644 index 318e8099a0..9dfa1eb66c --- a/typedapi/types/anomalydetectors.go +++ b/typedapi/types/anomalydetectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,12 +25,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // AnomalyDetectors type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/info/types.ts#L44-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/info/types.ts#L44-L50 type AnomalyDetectors struct { CategorizationAnalyzer CategorizationAnalyzer `json:"categorization_analyzer"` CategorizationExamplesLimit int `json:"categorization_examples_limit"` @@ -40,6 +42,7 @@ type AnomalyDetectors struct { } func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -54,6 +57,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { switch t { case "categorization_analyzer": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -73,23 +77,59 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { } case "categorization_examples_limit": - if err := dec.Decode(&s.CategorizationExamplesLimit); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CategorizationExamplesLimit = value + case float64: + f := int(v) + s.CategorizationExamplesLimit = f } case "daily_model_snapshot_retention_after_days": - if err := dec.Decode(&s.DailyModelSnapshotRetentionAfterDays); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DailyModelSnapshotRetentionAfterDays = value + case float64: + f := int(v) + s.DailyModelSnapshotRetentionAfterDays = f } case "model_memory_limit": - if err := dec.Decode(&s.ModelMemoryLimit); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.ModelMemoryLimit = o case "model_snapshot_retention_days": - if err := dec.Decode(&s.ModelSnapshotRetentionDays); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ModelSnapshotRetentionDays = value + case float64: + f := int(v) + s.ModelSnapshotRetentionDays = f } } diff --git a/typedapi/types/anomalyexplanation.go b/typedapi/types/anomalyexplanation.go old mode 100755 new mode 100644 index c0d64d1ee9..88fc512815 --- a/typedapi/types/anomalyexplanation.go +++ b/typedapi/types/anomalyexplanation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AnomalyExplanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Anomaly.ts#L156-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Anomaly.ts#L156-L197 type AnomalyExplanation struct { // AnomalyCharacteristicsImpact Impact from the duration and magnitude of the detected anomaly relative to // the historical average. 
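// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the generated code): the
// AnomalyDetectors hunk above swaps direct int decoding for a strconv.Atoi
// fallback, so the ML info defaults decode whether the limits come back as
// JSON numbers or quoted strings. A short sketch, assuming the regenerated
// typedapi/types package:
// ---------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"categorization_examples_limit":"4","model_snapshot_retention_days":10}`)

	var d types.AnomalyDetectors
	if err := json.Unmarshal(raw, &d); err != nil {
		panic(err)
	}
	fmt.Println(d.CategorizationExamplesLimit, d.ModelSnapshotRetentionDays) // 4 10
}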
@@ -50,6 +60,174 @@ type AnomalyExplanation struct { UpperConfidenceBound *Float64 `json:"upper_confidence_bound,omitempty"` } +func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "anomaly_characteristics_impact": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AnomalyCharacteristicsImpact = &value + case float64: + f := int(v) + s.AnomalyCharacteristicsImpact = &f + } + + case "anomaly_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AnomalyLength = &value + case float64: + f := int(v) + s.AnomalyLength = &f + } + + case "anomaly_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AnomalyType = &o + + case "high_variance_penalty": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HighVariancePenalty = &value + case bool: + s.HighVariancePenalty = &v + } + + case "incomplete_bucket_penalty": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncompleteBucketPenalty = &value + case bool: + s.IncompleteBucketPenalty = &v + } + + case "lower_confidence_bound": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.LowerConfidenceBound = &f + case float64: + f := Float64(v) + s.LowerConfidenceBound = &f + } + + case "multi_bucket_impact": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MultiBucketImpact = &value + case float64: + f := int(v) + s.MultiBucketImpact = &f + } + + case "single_bucket_impact": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SingleBucketImpact = &value + case float64: + f := int(v) + s.SingleBucketImpact = &f + } + + case "typical_value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.TypicalValue = &f + case float64: + f := Float64(v) + s.TypicalValue = &f + } + + case "upper_confidence_bound": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.UpperConfidenceBound = &f + case float64: + f := Float64(v) + s.UpperConfidenceBound = &f + } + + } + } + return nil +} + // NewAnomalyExplanation returns a AnomalyExplanation. func NewAnomalyExplanation() *AnomalyExplanation { r := &AnomalyExplanation{} diff --git a/typedapi/types/apikey.go b/typedapi/types/apikey.go old mode 100755 new mode 100644 index a1bd0084dc..0efe417862 --- a/typedapi/types/apikey.go +++ b/typedapi/types/apikey.go @@ -16,24 +16,30 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // ApiKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/ApiKey.ts#L27-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/ApiKey.ts#L27-L41 type ApiKey struct { Creation *int64 `json:"creation,omitempty"` Expiration *int64 `json:"expiration,omitempty"` Id string `json:"id"` Invalidated *bool `json:"invalidated,omitempty"` LimitedBy []map[string]RoleDescriptor `json:"limited_by,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` Name string `json:"name"` Realm *string `json:"realm,omitempty"` RoleDescriptors map[string]RoleDescriptor `json:"role_descriptors,omitempty"` @@ -41,6 +47,116 @@ type ApiKey struct { Username *string `json:"username,omitempty"` } +func (s *ApiKey) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "creation": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Creation = &value + case float64: + f := int64(v) + s.Creation = &f + } + + case "expiration": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Expiration = &value + case float64: + f := int64(v) + s.Expiration = &f + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "invalidated": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Invalidated = &value + case bool: + s.Invalidated = &v + } + + case "limited_by": + if err := dec.Decode(&s.LimitedBy); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "realm": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Realm = &o + + case "role_descriptors": + if s.RoleDescriptors == nil { + s.RoleDescriptors = make(map[string]RoleDescriptor, 0) + } + if err := dec.Decode(&s.RoleDescriptors); err != nil { + return err + } + + case "_sort": + if err := dec.Decode(&s.Sort_); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil +} + // NewApiKey returns a ApiKey. func NewApiKey() *ApiKey { r := &ApiKey{ diff --git a/typedapi/types/apikeyauthorization.go b/typedapi/types/apikeyauthorization.go old mode 100755 new mode 100644 index dd5d94d889..5eea9fa57c --- a/typedapi/types/apikeyauthorization.go +++ b/typedapi/types/apikeyauthorization.go @@ -16,13 +16,13 @@ // under the License. 
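// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the generated code): the
// ApiKey hunk above moves Metadata from map[string]json.RawMessage to the
// shared Metadata alias and adds an UnmarshalJSON that tolerates epoch-millis
// fields sent as strings. The sketch below treats Metadata as opaque and just
// round-trips it through json.Marshal; assumes the regenerated typedapi/types
// package:
// ---------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"id":"abc123","name":"ci-key","creation":"1670000000000","metadata":{"env":"prod"}}`)

	var key types.ApiKey
	if err := json.Unmarshal(raw, &key); err != nil {
		panic(err)
	}

	// Metadata re-marshals back to its original JSON object.
	meta, _ := json.Marshal(key.Metadata)
	fmt.Println(key.Id, key.Name, *key.Creation, string(meta))
	// abc123 ci-key 1670000000000 {"env":"prod"}
}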
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ApiKeyAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Authorization.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Authorization.ts#L20-L29 type ApiKeyAuthorization struct { // Id The identifier for the API key. Id string `json:"id"` diff --git a/typedapi/types/appendprocessor.go b/typedapi/types/appendprocessor.go old mode 100755 new mode 100644 index 3953c9e73c..40afb61c37 --- a/typedapi/types/appendprocessor.go +++ b/typedapi/types/appendprocessor.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // AppendProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L90-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L90-L94 type AppendProcessor struct { AllowDuplicates *bool `json:"allow_duplicates,omitempty"` Description *string `json:"description,omitempty"` @@ -38,6 +44,93 @@ type AppendProcessor struct { Value []json.RawMessage `json:"value"` } +func (s *AppendProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_duplicates": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowDuplicates = &value + case bool: + s.AllowDuplicates = &v + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + } + } + return nil +} + // NewAppendProcessor returns a AppendProcessor. 
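// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the generated code): the
// AppendProcessor decoder above keeps `value` as []json.RawMessage, so
// heterogeneous append values survive decoding untouched while the boolean
// flags get the usual string/bool fallback. Assumes the regenerated
// typedapi/types package:
// ---------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"field":"tags","value":["production", 42, true],"allow_duplicates":"false"}`)

	var p types.AppendProcessor
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}

	fmt.Println(p.Field, len(p.Value), *p.AllowDuplicates) // tags 3 false
	for _, v := range p.Value {
		// Each element is still raw JSON: "production", 42, true.
		fmt.Println(string(v))
	}
}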
func NewAppendProcessor() *AppendProcessor { r := &AppendProcessor{} diff --git a/typedapi/types/applicationglobaluserprivileges.go b/typedapi/types/applicationglobaluserprivileges.go old mode 100755 new mode 100644 index c04240adb9..03210e5fbe --- a/typedapi/types/applicationglobaluserprivileges.go +++ b/typedapi/types/applicationglobaluserprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ApplicationGlobalUserPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L191-L193 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L193-L195 type ApplicationGlobalUserPrivileges struct { Manage ManageUserPrivileges `json:"manage"` } diff --git a/typedapi/types/applicationprivileges.go b/typedapi/types/applicationprivileges.go old mode 100755 new mode 100644 index 8425f62666..74b397a252 --- a/typedapi/types/applicationprivileges.go +++ b/typedapi/types/applicationprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ApplicationPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L26-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L26-L39 type ApplicationPrivileges struct { // Application The name of the application to which this entry applies. Application string `json:"application"` diff --git a/typedapi/types/applicationprivilegescheck.go b/typedapi/types/applicationprivilegescheck.go old mode 100755 new mode 100644 index 2404696284..b44591eecb --- a/typedapi/types/applicationprivilegescheck.go +++ b/typedapi/types/applicationprivilegescheck.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ApplicationPrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/types.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/types.ts#L24-L31 type ApplicationPrivilegesCheck struct { // Application The name of the application. 
Application string `json:"application"` diff --git a/typedapi/types/applicationsprivileges.go b/typedapi/types/applicationsprivileges.go old mode 100755 new mode 100644 index 4337327554..f875e1c75f --- a/typedapi/types/applicationsprivileges.go +++ b/typedapi/types/applicationsprivileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ApplicationsPrivileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/types.ts#L46-L46 -type ApplicationsPrivileges map[string]map[string]map[string]bool +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/types.ts#L46-L46 +type ApplicationsPrivileges map[string]ResourcePrivileges diff --git a/typedapi/types/archive.go b/typedapi/types/archive.go old mode 100755 new mode 100644 index ca1cf5b53d..bd74c0980d --- a/typedapi/types/archive.go +++ b/typedapi/types/archive.go @@ -16,19 +16,92 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Archive type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L48-L50 type Archive struct { Available bool `json:"available"` Enabled bool `json:"enabled"` IndicesCount int64 `json:"indices_count"` } +func (s *Archive) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "indices_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndicesCount = value + case float64: + f := int64(v) + s.IndicesCount = f + } + + } + } + return nil +} + // NewArchive returns a Archive. func NewArchive() *Archive { r := &Archive{} diff --git a/typedapi/types/arraycomparecondition.go b/typedapi/types/arraycomparecondition.go old mode 100755 new mode 100644 index a1ba8c5771..3464d9cdd5 --- a/typedapi/types/arraycomparecondition.go +++ b/typedapi/types/arraycomparecondition.go @@ -16,7 +16,7 @@ // under the License. 
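// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the generated code): the
// ApplicationsPrivileges alias above changes from the raw
// map[string]map[string]map[string]bool to map[string]ResourcePrivileges, so
// has_privileges callers now see a named type per application. Whether
// ResourcePrivileges still nests maps all the way down is not shown in this
// diff, so the sketch below relies only on the outer map and prints the
// per-application value with %v instead of indexing into it. Assumes the
// regenerated typedapi/types package:
// ---------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"my-app":{"my-resource":{"read":true,"write":false}}}`)

	var perms types.ApplicationsPrivileges
	if err := json.Unmarshal(raw, &perms); err != nil {
		panic(err)
	}
	for app, resources := range perms {
		fmt.Printf("%s => %v\n", app, resources)
	}
}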
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,9 +29,9 @@ import ( // ArrayCompareCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L32-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L32-L36 type ArrayCompareCondition struct { - ArrayCompareCondition map[conditionop.ConditionOp]ArrayCompareOpParams `json:"-"` + ArrayCompareCondition map[conditionop.ConditionOp]ArrayCompareOpParams `json:"ArrayCompareCondition,omitempty"` Path string `json:"path"` } @@ -54,6 +54,7 @@ func (s ArrayCompareCondition) MarshalJSON() ([]byte, error) { for key, value := range s.ArrayCompareCondition { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "ArrayCompareCondition") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/arraycompareopparams.go b/typedapi/types/arraycompareopparams.go old mode 100755 new mode 100644 index 421849d922..441742afeb --- a/typedapi/types/arraycompareopparams.go +++ b/typedapi/types/arraycompareopparams.go @@ -16,22 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/quantifier" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ArrayCompareOpParams type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L27-L30 type ArrayCompareOpParams struct { Quantifier quantifier.Quantifier `json:"quantifier"` Value FieldValue `json:"value"` } +func (s *ArrayCompareOpParams) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "quantifier": + if err := dec.Decode(&s.Quantifier); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + } + } + return nil +} + // NewArrayCompareOpParams returns a ArrayCompareOpParams. func NewArrayCompareOpParams() *ArrayCompareOpParams { r := &ArrayCompareOpParams{} diff --git a/typedapi/types/arraypercentilesitem.go b/typedapi/types/arraypercentilesitem.go old mode 100755 new mode 100644 index 10cbad56cc..5d8d9e2c8a --- a/typedapi/types/arraypercentilesitem.go +++ b/typedapi/types/arraypercentilesitem.go @@ -16,19 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // ArrayPercentilesItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L159-L163 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L160-L164 type ArrayPercentilesItem struct { Key string `json:"key"` Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *ArrayPercentilesItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Key = o + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewArrayPercentilesItem returns a ArrayPercentilesItem. func NewArrayPercentilesItem() *ArrayPercentilesItem { r := &ArrayPercentilesItem{} diff --git a/typedapi/types/asciifoldingtokenfilter.go b/typedapi/types/asciifoldingtokenfilter.go old mode 100755 new mode 100644 index 26c05402e5..829e9e1948 --- a/typedapi/types/asciifoldingtokenfilter.go +++ b/typedapi/types/asciifoldingtokenfilter.go @@ -16,19 +16,73 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AsciiFoldingTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L167-L170 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L168-L171 type AsciiFoldingTokenFilter struct { PreserveOriginal *bool `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *AsciiFoldingTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewAsciiFoldingTokenFilter returns a AsciiFoldingTokenFilter. 
func NewAsciiFoldingTokenFilter() *AsciiFoldingTokenFilter { r := &AsciiFoldingTokenFilter{} diff --git a/typedapi/types/asyncsearch.go b/typedapi/types/asyncsearch.go old mode 100755 new mode 100644 index 159c8a68c2..f74848c211 --- a/typedapi/types/asyncsearch.go +++ b/typedapi/types/asyncsearch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // AsyncSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/async_search/_types/AsyncSearch.ts#L30-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/async_search/_types/AsyncSearch.ts#L30-L45 type AsyncSearch struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` @@ -51,6 +53,7 @@ type AsyncSearch struct { } func (s *AsyncSearch) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,6 +68,10 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -77,415 +84,494 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } switch elems[0] { + case "cardinality": o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := 
NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := 
NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "srareterms": o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geotile_grid": o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohex_grid": o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := 
dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -512,6 +598,9 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { 
} case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Fields); err != nil { return err } @@ -522,13 +611,34 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "max_score": - if err := dec.Decode(&s.MaxScore); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MaxScore = &f + case float64: + f := Float64(v) + s.MaxScore = &f } case "num_reduce_phases": - if err := dec.Decode(&s.NumReducePhases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f } case "pit_id": @@ -552,23 +662,54 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]Suggest, 0) + } if err := dec.Decode(&s.Suggest); err != nil { return err } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/types/attachmentprocessor.go b/typedapi/types/attachmentprocessor.go old mode 100755 new mode 100644 index 5499c45707..4418f3ce18 --- a/typedapi/types/attachmentprocessor.go +++ b/typedapi/types/attachmentprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AttachmentProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L96-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L96-L104 type AttachmentProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -38,6 +48,126 @@ type AttachmentProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "indexed_chars": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexedChars = &value + case float64: + f := int64(v) + s.IndexedChars = &f + } + + case "indexed_chars_field": + if err := dec.Decode(&s.IndexedCharsField); err != nil { + return err + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "properties": + if err := dec.Decode(&s.Properties); err != nil { + return err + } + + case "resource_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResourceName = &o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewAttachmentProcessor returns a AttachmentProcessor. func NewAttachmentProcessor() *AttachmentProcessor { r := &AttachmentProcessor{} diff --git a/typedapi/types/audit.go b/typedapi/types/audit.go old mode 100755 new mode 100644 index 091ba06e38..50ff308725 --- a/typedapi/types/audit.go +++ b/typedapi/types/audit.go @@ -16,18 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Audit type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L73-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L73-L75 type Audit struct { Enabled bool `json:"enabled"` Outputs []string `json:"outputs,omitempty"` } +func (s *Audit) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "outputs": + if err := dec.Decode(&s.Outputs); err != nil { + return err + } + + } + } + return nil +} + // NewAudit returns a Audit. func NewAudit() *Audit { r := &Audit{} diff --git a/typedapi/types/authenticateduser.go b/typedapi/types/authenticateduser.go old mode 100755 new mode 100644 index 03c23422bf..1c55b07e17 --- a/typedapi/types/authenticateduser.go +++ b/typedapi/types/authenticateduser.go @@ -16,29 +16,125 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // AuthenticatedUser type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_token/types.ts#L40-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_token/types.ts#L40-L45 type AuthenticatedUser struct { - AuthenticationProvider *AuthenticationProvider `json:"authentication_provider,omitempty"` - AuthenticationRealm UserRealm `json:"authentication_realm"` - AuthenticationType string `json:"authentication_type"` - Email string `json:"email,omitempty"` - Enabled bool `json:"enabled"` - FullName string `json:"full_name,omitempty"` - LookupRealm UserRealm `json:"lookup_realm"` - Metadata map[string]json.RawMessage `json:"metadata"` - ProfileUid *string `json:"profile_uid,omitempty"` - Roles []string `json:"roles"` - Username string `json:"username"` + AuthenticationProvider *AuthenticationProvider `json:"authentication_provider,omitempty"` + AuthenticationRealm UserRealm `json:"authentication_realm"` + AuthenticationType string `json:"authentication_type"` + Email string `json:"email,omitempty"` + Enabled bool `json:"enabled"` + FullName string `json:"full_name,omitempty"` + LookupRealm UserRealm `json:"lookup_realm"` + Metadata Metadata `json:"metadata"` + ProfileUid *string `json:"profile_uid,omitempty"` + Roles []string `json:"roles"` + Username string `json:"username"` +} + +func (s *AuthenticatedUser) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "authentication_provider": + if err := dec.Decode(&s.AuthenticationProvider); err != nil { + return err + } + + case "authentication_realm": + if err := 
dec.Decode(&s.AuthenticationRealm); err != nil { + return err + } + + case "authentication_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AuthenticationType = o + + case "email": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Email = o + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "full_name": + if err := dec.Decode(&s.FullName); err != nil { + return err + } + + case "lookup_realm": + if err := dec.Decode(&s.LookupRealm); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "profile_uid": + if err := dec.Decode(&s.ProfileUid); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil } // NewAuthenticatedUser returns a AuthenticatedUser. diff --git a/typedapi/types/authenticatetoken.go b/typedapi/types/authenticatetoken.go old mode 100755 new mode 100644 index f91f5c20c0..2701ca68a4 --- a/typedapi/types/authenticatetoken.go +++ b/typedapi/types/authenticatetoken.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AuthenticateToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/authenticate/types.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/authenticate/types.ts#L22-L26 type AuthenticateToken struct { Name string `json:"name"` Type *string `json:"type,omitempty"` } +func (s *AuthenticateToken) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + } + } + return nil +} + // NewAuthenticateToken returns a AuthenticateToken. func NewAuthenticateToken() *AuthenticateToken { r := &AuthenticateToken{} diff --git a/typedapi/types/authenticationprovider.go b/typedapi/types/authenticationprovider.go old mode 100755 new mode 100644 index 5db63ba605..6dacfa34d7 --- a/typedapi/types/authenticationprovider.go +++ b/typedapi/types/authenticationprovider.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AuthenticationProvider type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_token/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_token/types.ts#L35-L38 type AuthenticationProvider struct { Name string `json:"name"` Type string `json:"type"` } +func (s *AuthenticationProvider) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewAuthenticationProvider returns a AuthenticationProvider. func NewAuthenticationProvider() *AuthenticationProvider { r := &AuthenticationProvider{} diff --git a/typedapi/types/autodatehistogramaggregate.go b/typedapi/types/autodatehistogramaggregate.go old mode 100755 new mode 100644 index be44c1783a..dbd14bcc32 --- a/typedapi/types/autodatehistogramaggregate.go +++ b/typedapi/types/autodatehistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,14 +30,15 @@ import ( // AutoDateHistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L355-L359 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L356-L360 type AutoDateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Interval string `json:"interval"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *AutoDateHistogramAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -58,15 +59,17 @@ func (s *AutoDateHistogramAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]DateHistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []DateHistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/autodatehistogramaggregation.go b/typedapi/types/autodatehistogramaggregation.go old mode 100755 new mode 100644 index 27b23e6f37..a171bfd309 --- a/typedapi/types/autodatehistogramaggregation.go +++ b/typedapi/types/autodatehistogramaggregation.go @@ -16,24 +16,30 @@ // under the License. 
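// A minimal sketch (not part of the generated patch) of the token-driven
// decoding shape that the UnmarshalJSON methods added above (Audit,
// AuthenticatedUser, AuthenticateToken, AuthenticationProvider) all share:
// stream tokens from a json.Decoder, switch on each field name, and decode
// the following value into the matching field. The "example" type and its
// fields are hypothetical stand-ins, not types from this package.
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

type example struct {
	Name    string   `json:"name"`
	Outputs []string `json:"outputs,omitempty"`
}

func (s *example) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		// Token yields '{', each field name, each value token, '}' and
		// finally io.EOF, which ends the loop.
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "name":
			if err := dec.Decode(&s.Name); err != nil {
				return err
			}
		case "outputs":
			if err := dec.Decode(&s.Outputs); err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	var e example
	if err := json.Unmarshal([]byte(`{"name":"audit","outputs":["index"]}`), &e); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", e)
}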
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/minimuminterval" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // AutoDateHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L52-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L52-L62 type AutoDateHistogramAggregation struct { Buckets *int `json:"buckets,omitempty"` Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` MinimumInterval *minimuminterval.MinimumInterval `json:"minimum_interval,omitempty"` Missing DateTime `json:"missing,omitempty"` Name *string `json:"name,omitempty"` @@ -43,6 +49,104 @@ type AutoDateHistogramAggregation struct { TimeZone *string `json:"time_zone,omitempty"` } +func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buckets": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Buckets = &value + case float64: + f := int(v) + s.Buckets = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "minimum_interval": + if err := dec.Decode(&s.MinimumInterval); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "offset": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Offset = &o + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + } + } + return nil +} + // NewAutoDateHistogramAggregation returns a AutoDateHistogramAggregation. func NewAutoDateHistogramAggregation() *AutoDateHistogramAggregation { r := &AutoDateHistogramAggregation{ diff --git a/typedapi/types/autofollowedcluster.go b/typedapi/types/autofollowedcluster.go old mode 100755 new mode 100644 index 48af81517c..25961d0d69 --- a/typedapi/types/autofollowedcluster.go +++ b/typedapi/types/autofollowedcluster.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
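// An illustrative sketch (not part of the generated patch) of the keyed
// versus array dispatch used for the Buckets field of
// AutoDateHistogramAggregate above: the raw value is buffered, the first
// byte decides whether it is a keyed object or a plain array, and decode
// errors are now returned rather than ignored. The "bucket" and "holder"
// types here are hypothetical simplifications.
package main

import (
	"encoding/json"
	"fmt"
)

type bucket struct {
	DocCount int64 `json:"doc_count"`
}

// holder stands in for an aggregate whose "buckets" may be keyed or plain.
type holder struct {
	Buckets interface{} `json:"buckets"`
}

func (h *holder) UnmarshalJSON(data []byte) error {
	var raw struct {
		Buckets json.RawMessage `json:"buckets"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if len(raw.Buckets) == 0 {
		return nil
	}
	switch raw.Buckets[0] {
	case '{': // keyed form: bucket name -> bucket
		o := make(map[string]bucket)
		if err := json.Unmarshal(raw.Buckets, &o); err != nil {
			return err
		}
		h.Buckets = o
	case '[': // default form: ordered slice of buckets
		var o []bucket
		if err := json.Unmarshal(raw.Buckets, &o); err != nil {
			return err
		}
		h.Buckets = o
	}
	return nil
}

func main() {
	var keyed, plain holder
	_ = json.Unmarshal([]byte(`{"buckets":{"a":{"doc_count":1}}}`), &keyed)
	_ = json.Unmarshal([]byte(`{"buckets":[{"doc_count":2}]}`), &plain)
	fmt.Printf("%T %T\n", keyed.Buckets, plain.Buckets)
}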
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AutoFollowedCluster type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/stats/types.ts.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/stats/types.ts.ts#L27-L31 type AutoFollowedCluster struct { ClusterName string `json:"cluster_name"` LastSeenMetadataVersion int64 `json:"last_seen_metadata_version"` TimeSinceLastCheckMillis int64 `json:"time_since_last_check_millis"` } +func (s *AutoFollowedCluster) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cluster_name": + if err := dec.Decode(&s.ClusterName); err != nil { + return err + } + + case "last_seen_metadata_version": + if err := dec.Decode(&s.LastSeenMetadataVersion); err != nil { + return err + } + + case "time_since_last_check_millis": + if err := dec.Decode(&s.TimeSinceLastCheckMillis); err != nil { + return err + } + + } + } + return nil +} + // NewAutoFollowedCluster returns a AutoFollowedCluster. func NewAutoFollowedCluster() *AutoFollowedCluster { r := &AutoFollowedCluster{} diff --git a/typedapi/types/autofollowpattern.go b/typedapi/types/autofollowpattern.go old mode 100755 new mode 100644 index 994e3ceb24..6ed265e7d3 --- a/typedapi/types/autofollowpattern.go +++ b/typedapi/types/autofollowpattern.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AutoFollowPattern type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/get_auto_follow_pattern/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/get_auto_follow_pattern/types.ts#L23-L26 type AutoFollowPattern struct { Name string `json:"name"` Pattern AutoFollowPatternSummary `json:"pattern"` } +func (s *AutoFollowPattern) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "pattern": + if err := dec.Decode(&s.Pattern); err != nil { + return err + } + + } + } + return nil +} + // NewAutoFollowPattern returns a AutoFollowPattern. func NewAutoFollowPattern() *AutoFollowPattern { r := &AutoFollowPattern{} diff --git a/typedapi/types/autofollowpatternsummary.go b/typedapi/types/autofollowpatternsummary.go old mode 100755 new mode 100644 index 0012e1443b..139dd8fd1b --- a/typedapi/types/autofollowpatternsummary.go +++ b/typedapi/types/autofollowpatternsummary.go @@ -16,13 +16,23 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AutoFollowPatternSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/get_auto_follow_pattern/types.ts#L28-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/get_auto_follow_pattern/types.ts#L28-L51 type AutoFollowPatternSummary struct { Active bool `json:"active"` // FollowIndexPattern The name of follower index. @@ -39,6 +49,79 @@ type AutoFollowPatternSummary struct { RemoteCluster string `json:"remote_cluster"` } +func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Active = value + case bool: + s.Active = v + } + + case "follow_index_pattern": + if err := dec.Decode(&s.FollowIndexPattern); err != nil { + return err + } + + case "leader_index_exclusion_patterns": + if err := dec.Decode(&s.LeaderIndexExclusionPatterns); err != nil { + return err + } + + case "leader_index_patterns": + if err := dec.Decode(&s.LeaderIndexPatterns); err != nil { + return err + } + + case "max_outstanding_read_requests": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOutstandingReadRequests = value + case float64: + f := int(v) + s.MaxOutstandingReadRequests = f + } + + case "remote_cluster": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RemoteCluster = o + + } + } + return nil +} + // NewAutoFollowPatternSummary returns a AutoFollowPatternSummary. func NewAutoFollowPatternSummary() *AutoFollowPatternSummary { r := &AutoFollowPatternSummary{} diff --git a/typedapi/types/autofollowstats.go b/typedapi/types/autofollowstats.go old mode 100755 new mode 100644 index 13958c6cdb..eede532f2f --- a/typedapi/types/autofollowstats.go +++ b/typedapi/types/autofollowstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AutoFollowStats type. 
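// An illustrative sketch (not part of the generated patch) of the lenient
// boolean handling used above for fields such as Audit.Enabled and
// AutoFollowPatternSummary.Active: accept either a JSON boolean or its
// string form via strconv.ParseBool. The "flag" type is a hypothetical
// stand-in.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type flag struct {
	Enabled bool
}

func (f *flag) UnmarshalJSON(data []byte) error {
	var tmp interface{}
	if err := json.Unmarshal(data, &tmp); err != nil {
		return err
	}
	switch v := tmp.(type) {
	case bool:
		f.Enabled = v
	case string:
		value, err := strconv.ParseBool(v)
		if err != nil {
			return err
		}
		f.Enabled = value
	}
	return nil
}

func main() {
	var a, b flag
	_ = json.Unmarshal([]byte(`true`), &a)
	_ = json.Unmarshal([]byte(`"true"`), &b)
	fmt.Println(a.Enabled, b.Enabled) // true true
}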
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/stats/types.ts.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/stats/types.ts.ts#L33-L39 type AutoFollowStats struct { AutoFollowedClusters []AutoFollowedCluster `json:"auto_followed_clusters"` NumberOfFailedFollowIndices int64 `json:"number_of_failed_follow_indices"` @@ -31,6 +41,81 @@ type AutoFollowStats struct { RecentAutoFollowErrors []ErrorCause `json:"recent_auto_follow_errors"` } +func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auto_followed_clusters": + if err := dec.Decode(&s.AutoFollowedClusters); err != nil { + return err + } + + case "number_of_failed_follow_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumberOfFailedFollowIndices = value + case float64: + f := int64(v) + s.NumberOfFailedFollowIndices = f + } + + case "number_of_failed_remote_cluster_state_requests": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumberOfFailedRemoteClusterStateRequests = value + case float64: + f := int64(v) + s.NumberOfFailedRemoteClusterStateRequests = f + } + + case "number_of_successful_follow_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumberOfSuccessfulFollowIndices = value + case float64: + f := int64(v) + s.NumberOfSuccessfulFollowIndices = f + } + + case "recent_auto_follow_errors": + if err := dec.Decode(&s.RecentAutoFollowErrors); err != nil { + return err + } + + } + } + return nil +} + // NewAutoFollowStats returns a AutoFollowStats. func NewAutoFollowStats() *AutoFollowStats { r := &AutoFollowStats{} diff --git a/typedapi/types/autoscalingcapacity.go b/typedapi/types/autoscalingcapacity.go old mode 100755 new mode 100644 index 3ea536f588..17f70c89e4 --- a/typedapi/types/autoscalingcapacity.go +++ b/typedapi/types/autoscalingcapacity.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // AutoscalingCapacity type. 
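// An illustrative sketch (not part of the generated patch) of the lenient
// integer handling used above for the AutoFollowStats counters: numbers
// arrive from the generic decoder as float64 and are narrowed with
// int64(v), while quoted numbers go through strconv.ParseInt. The
// parseCount helper is hypothetical.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func parseCount(data []byte) (int64, error) {
	var tmp interface{}
	if err := json.Unmarshal(data, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case float64: // encoding/json's default numeric type
		return int64(v), nil
	case string:
		return strconv.ParseInt(v, 10, 64)
	}
	return 0, fmt.Errorf("unexpected JSON type %T", tmp)
}

func main() {
	n1, _ := parseCount([]byte(`42`))
	n2, _ := parseCount([]byte(`"42"`))
	fmt.Println(n1, n2) // 42 42
}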
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L38-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L38-L41 type AutoscalingCapacity struct { Node AutoscalingResources `json:"node"` Total AutoscalingResources `json:"total"` diff --git a/typedapi/types/autoscalingdecider.go b/typedapi/types/autoscalingdecider.go old mode 100755 new mode 100644 index bbc7e848a2..e642128d6b --- a/typedapi/types/autoscalingdecider.go +++ b/typedapi/types/autoscalingdecider.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // AutoscalingDecider type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L52-L56 type AutoscalingDecider struct { ReasonDetails json.RawMessage `json:"reason_details,omitempty"` ReasonSummary *string `json:"reason_summary,omitempty"` diff --git a/typedapi/types/autoscalingdeciders.go b/typedapi/types/autoscalingdeciders.go old mode 100755 new mode 100644 index ac136497c0..740d2a7e3f --- a/typedapi/types/autoscalingdeciders.go +++ b/typedapi/types/autoscalingdeciders.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // AutoscalingDeciders type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L31-L36 type AutoscalingDeciders struct { CurrentCapacity AutoscalingCapacity `json:"current_capacity"` CurrentNodes []AutoscalingNode `json:"current_nodes"` diff --git a/typedapi/types/autoscalingnode.go b/typedapi/types/autoscalingnode.go old mode 100755 new mode 100644 index 7f79b2e93d..3fce6bb373 --- a/typedapi/types/autoscalingnode.go +++ b/typedapi/types/autoscalingnode.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AutoscalingNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L48-L50 type AutoscalingNode struct { Name string `json:"name"` } +func (s *AutoscalingNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewAutoscalingNode returns a AutoscalingNode. func NewAutoscalingNode() *AutoscalingNode { r := &AutoscalingNode{} diff --git a/typedapi/types/autoscalingpolicy.go b/typedapi/types/autoscalingpolicy.go old mode 100755 new mode 100644 index 3919d61afa..0ed18af7c1 --- a/typedapi/types/autoscalingpolicy.go +++ b/typedapi/types/autoscalingpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // AutoscalingPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/_types/AutoscalingPolicy.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/_types/AutoscalingPolicy.ts#L23-L27 type AutoscalingPolicy struct { // Deciders Decider settings Deciders map[string]json.RawMessage `json:"deciders"` diff --git a/typedapi/types/autoscalingresources.go b/typedapi/types/autoscalingresources.go old mode 100755 new mode 100644 index c203cd6c05..46e929ff3f --- a/typedapi/types/autoscalingresources.go +++ b/typedapi/types/autoscalingresources.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // AutoscalingResources type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L43-L46 type AutoscalingResources struct { Memory int `json:"memory"` Storage int `json:"storage"` } +func (s *AutoscalingResources) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "memory": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Memory = value + case float64: + f := int(v) + s.Memory = f + } + + case "storage": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Storage = value + case float64: + f := int(v) + s.Storage = f + } + + } + } + return nil +} + // NewAutoscalingResources returns a AutoscalingResources. func NewAutoscalingResources() *AutoscalingResources { r := &AutoscalingResources{} diff --git a/typedapi/types/averageaggregation.go b/typedapi/types/averageaggregation.go old mode 100755 new mode 100644 index f871fbc32e..934dd5aa6b --- a/typedapi/types/averageaggregation.go +++ b/typedapi/types/averageaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // AverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L48-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L48-L48 type AverageAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type AverageAggregation struct { Script Script `json:"script,omitempty"` } +func (s *AverageAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewAverageAggregation returns a AverageAggregation. 
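// A usage sketch for the AutoscalingResources decoding added above: thanks
// to the lenient handling, both numeric and quoted capacity values decode
// into the typed struct. The payload here is made up for illustration.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"memory": 1024, "storage": "2048"}`)
	var res types.AutoscalingResources
	if err := json.Unmarshal(payload, &res); err != nil {
		panic(err)
	}
	fmt.Println(res.Memory, res.Storage) // 1024 2048
}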
func NewAverageAggregation() *AverageAggregation { r := &AverageAggregation{} diff --git a/typedapi/types/averagebucketaggregation.go b/typedapi/types/averagebucketaggregation.go old mode 100755 new mode 100644 index 27102b1955..cc0fc4f94e --- a/typedapi/types/averagebucketaggregation.go +++ b/typedapi/types/averagebucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // AverageBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L69-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L69-L69 type AverageBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/avgaggregate.go b/typedapi/types/avgaggregate.go old mode 100755 new mode 100644 index f5c17fdc14..a2f40f9674 --- a/typedapi/types/avgaggregate.go +++ b/typedapi/types/avgaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // AvgAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L208-L209 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L209-L210 type AvgAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. 
A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -36,6 +40,44 @@ type AvgAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *AvgAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewAvgAggregate returns a AvgAggregate. func NewAvgAggregate() *AvgAggregate { r := &AvgAggregate{} diff --git a/typedapi/types/base.go b/typedapi/types/base.go old mode 100755 new mode 100644 index 0b7dc46148..6456d3a712 --- a/typedapi/types/base.go +++ b/typedapi/types/base.go @@ -16,18 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Base type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L30-L33 type Base struct { Available bool `json:"available"` Enabled bool `json:"enabled"` } +func (s *Base) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewBase returns a Base. func NewBase() *Base { r := &Base{} diff --git a/typedapi/types/baseindicator.go b/typedapi/types/baseindicator.go new file mode 100644 index 0000000000..3b38388d82 --- /dev/null +++ b/typedapi/types/baseindicator.go @@ -0,0 +1,42 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
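// A usage sketch for the AvgAggregate decoding added above, assuming the
// package's Metadata type decodes from a plain JSON object. The payload is
// made up for illustration; value_as_string is only present when the
// server formats the metric.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"value": 75.5, "value_as_string": "75.5", "meta": {"owner": "search-team"}}`)
	var agg types.AvgAggregate
	if err := json.Unmarshal(payload, &agg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", agg)
}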
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// BaseIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L41-L46 +type BaseIndicator struct { + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewBaseIndicator returns a BaseIndicator. +func NewBaseIndicator() *BaseIndicator { + r := &BaseIndicator{} + + return r +} diff --git a/typedapi/types/basenode.go b/typedapi/types/basenode.go old mode 100755 new mode 100644 index 461102f8d2..b7af403482 --- a/typedapi/types/basenode.go +++ b/typedapi/types/basenode.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" + + "bytes" + "errors" + "io" + + "encoding/json" ) // BaseNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_spec_utils/BaseNode.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_spec_utils/BaseNode.ts#L25-L32 type BaseNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` @@ -36,6 +42,59 @@ type BaseNode struct { TransportAddress string `json:"transport_address"` } +func (s *BaseNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewBaseNode returns a BaseNode. 
func NewBaseNode() *BaseNode { r := &BaseNode{ diff --git a/typedapi/types/binaryproperty.go b/typedapi/types/binaryproperty.go old mode 100755 new mode 100644 index 6f64596ede..d944bdd664 --- a/typedapi/types/binaryproperty.go +++ b/typedapi/types/binaryproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // BinaryProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L49-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L49-L51 type BinaryProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -48,6 +50,7 @@ type BinaryProperty struct { } func (s *BinaryProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,13 +65,33 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -77,6 +100,9 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -364,23 +390,40 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -668,20 +711,32 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); 
err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/booleanproperty.go b/typedapi/types/booleanproperty.go old mode 100755 new mode 100644 index af13e99702..c5deaa50db --- a/typedapi/types/booleanproperty.go +++ b/typedapi/types/booleanproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // BooleanProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L53-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L53-L59 type BooleanProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -52,6 +54,7 @@ type BooleanProperty struct { } func (s *BooleanProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -66,18 +69,49 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -91,6 +125,9 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -378,33 +415,68 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + 
} + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.NullValue = &value + case bool: + s.NullValue = &v } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -692,20 +764,32 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/boolquery.go b/typedapi/types/boolquery.go old mode 100755 new mode 100644 index 143d4f6aba..ea57c9986e --- a/typedapi/types/boolquery.go +++ b/typedapi/types/boolquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BoolQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L28-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L28-L34 type BoolQuery struct { Boost *float32 `json:"boost,omitempty"` Filter []Query `json:"filter,omitempty"` @@ -33,6 +43,119 @@ type BoolQuery struct { Should []Query `json:"should,omitempty"` } +func (s *BoolQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "filter": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Filter = append(s.Filter, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { + return err + } + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "must": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Must = append(s.Must, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Must); err != nil { + return err + } + } + + case "must_not": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.MustNot = append(s.MustNot, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MustNot); err != nil { + return err + } + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "should": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Should = append(s.Should, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Should); err != nil { + return err + } + } + + } + } + return nil +} + // NewBoolQuery returns a BoolQuery. func NewBoolQuery() *BoolQuery { r := &BoolQuery{} diff --git a/typedapi/types/boostingquery.go b/typedapi/types/boostingquery.go old mode 100755 new mode 100644 index 7e670be592..d1b50f3204 --- a/typedapi/types/boostingquery.go +++ b/typedapi/types/boostingquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BoostingQuery type. 
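// An illustrative sketch (not part of the generated patch) of the
// single-value-or-array normalization used above for copy_to and for the
// filter/must/must_not/should clauses of BoolQuery: sniff the first byte
// of the raw value and either append one decoded element or decode the
// whole slice. The "copyTo" type is a hypothetical stand-in.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type copyTo struct {
	Targets []string
}

func (c *copyTo) UnmarshalJSON(data []byte) error {
	raw := json.RawMessage{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if !bytes.HasPrefix(raw, []byte("[")) {
		// single value: decode one string and append it
		var o string
		if err := json.Unmarshal(raw, &o); err != nil {
			return err
		}
		c.Targets = append(c.Targets, o)
		return nil
	}
	// already an array: decode the whole slice
	return json.Unmarshal(raw, &c.Targets)
}

func main() {
	var a, b copyTo
	_ = json.Unmarshal([]byte(`"full_name"`), &a)
	_ = json.Unmarshal([]byte(`["first","last"]`), &b)
	fmt.Println(a.Targets, b.Targets) // [full_name] [first last]
}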
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L36-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L36-L40 type BoostingQuery struct { Boost *float32 `json:"boost,omitempty"` Negative *Query `json:"negative,omitempty"` @@ -31,6 +41,76 @@ type BoostingQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *BoostingQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "negative": + if err := dec.Decode(&s.Negative); err != nil { + return err + } + + case "negative_boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.NegativeBoost = f + case float64: + f := Float64(v) + s.NegativeBoost = f + } + + case "positive": + if err := dec.Decode(&s.Positive); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewBoostingQuery returns a BoostingQuery. func NewBoostingQuery() *BoostingQuery { r := &BoostingQuery{} diff --git a/typedapi/types/boxplotaggregate.go b/typedapi/types/boxplotaggregate.go old mode 100755 new mode 100644 index 1572873440..8a544dc225 --- a/typedapi/types/boxplotaggregate.go +++ b/typedapi/types/boxplotaggregate.go @@ -16,33 +16,232 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // BoxPlotAggregate type. 
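// A usage sketch for the BoolQuery decoding added above: a single filter
// clause and a quoted boost both normalize correctly. The payload is made
// up for illustration.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{
	  "boost": "1.5",
	  "filter": {"exists": {"field": "status"}}
	}`)
	var q types.BoolQuery
	if err := json.Unmarshal(payload, &q); err != nil {
		panic(err)
	}
	fmt.Println(len(q.Filter), *q.Boost) // 1 1.5
}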
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L697-L713 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L706-L722 type BoxPlotAggregate struct { - Lower Float64 `json:"lower"` - LowerAsString *string `json:"lower_as_string,omitempty"` - Max Float64 `json:"max"` - MaxAsString *string `json:"max_as_string,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Min Float64 `json:"min"` - MinAsString *string `json:"min_as_string,omitempty"` - Q1 Float64 `json:"q1"` - Q1AsString *string `json:"q1_as_string,omitempty"` - Q2 Float64 `json:"q2"` - Q2AsString *string `json:"q2_as_string,omitempty"` - Q3 Float64 `json:"q3"` - Q3AsString *string `json:"q3_as_string,omitempty"` - Upper Float64 `json:"upper"` - UpperAsString *string `json:"upper_as_string,omitempty"` + Lower Float64 `json:"lower"` + LowerAsString *string `json:"lower_as_string,omitempty"` + Max Float64 `json:"max"` + MaxAsString *string `json:"max_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Min Float64 `json:"min"` + MinAsString *string `json:"min_as_string,omitempty"` + Q1 Float64 `json:"q1"` + Q1AsString *string `json:"q1_as_string,omitempty"` + Q2 Float64 `json:"q2"` + Q2AsString *string `json:"q2_as_string,omitempty"` + Q3 Float64 `json:"q3"` + Q3AsString *string `json:"q3_as_string,omitempty"` + Upper Float64 `json:"upper"` + UpperAsString *string `json:"upper_as_string,omitempty"` +} + +func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "lower": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lower = f + case float64: + f := Float64(v) + s.Lower = f + } + + case "lower_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LowerAsString = &o + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Max = f + case float64: + f := Float64(v) + s.Max = f + } + + case "max_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxAsString = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Min = f + case float64: + f := Float64(v) + s.Min = f + } + + case "min_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinAsString = &o + + case "q1": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Q1 = f + case float64: + f := Float64(v) + s.Q1 = f + } + + case "q1_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o 
:= string(tmp) + s.Q1AsString = &o + + case "q2": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Q2 = f + case float64: + f := Float64(v) + s.Q2 = f + } + + case "q2_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Q2AsString = &o + + case "q3": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Q3 = f + case float64: + f := Float64(v) + s.Q3 = f + } + + case "q3_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Q3AsString = &o + + case "upper": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Upper = f + case float64: + f := Float64(v) + s.Upper = f + } + + case "upper_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UpperAsString = &o + + } + } + return nil } // NewBoxPlotAggregate returns a BoxPlotAggregate. diff --git a/typedapi/types/boxplotaggregation.go b/typedapi/types/boxplotaggregation.go old mode 100755 new mode 100644 index 3763fd0985..38a4658819 --- a/typedapi/types/boxplotaggregation.go +++ b/typedapi/types/boxplotaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BoxplotAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L50-L52 type BoxplotAggregation struct { Compression *Float64 `json:"compression,omitempty"` Field *string `json:"field,omitempty"` @@ -30,6 +40,57 @@ type BoxplotAggregation struct { Script Script `json:"script,omitempty"` } +func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compression": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Compression = &f + case float64: + f := Float64(v) + s.Compression = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewBoxplotAggregation returns a BoxplotAggregation. 
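// A construction sketch for the request side: the generated structs remain
// plain value types, so a BoxplotAggregation can be built directly and
// marshalled; the new UnmarshalJSON above only affects decoding. The field
// name used here is made up for illustration.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	field := "load_time"
	compression := types.Float64(200)

	agg := types.BoxplotAggregation{
		Field:       &field,
		Compression: &compression,
	}

	body, _ := json.Marshal(agg)
	fmt.Println(string(body))
}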
func NewBoxplotAggregation() *BoxplotAggregation { r := &BoxplotAggregation{} diff --git a/typedapi/types/breaker.go b/typedapi/types/breaker.go old mode 100755 new mode 100644 index ad0f2c9764..d98043aed0 --- a/typedapi/types/breaker.go +++ b/typedapi/types/breaker.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Breaker type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L179-L186 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L179-L186 type Breaker struct { EstimatedSize *string `json:"estimated_size,omitempty"` EstimatedSizeInBytes *int64 `json:"estimated_size_in_bytes,omitempty"` @@ -32,6 +42,104 @@ type Breaker struct { Tripped *float32 `json:"tripped,omitempty"` } +func (s *Breaker) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "estimated_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.EstimatedSize = &o + + case "estimated_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EstimatedSizeInBytes = &value + case float64: + f := int64(v) + s.EstimatedSizeInBytes = &f + } + + case "limit_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LimitSize = &o + + case "limit_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LimitSizeInBytes = &value + case float64: + f := int64(v) + s.LimitSizeInBytes = &f + } + + case "overhead": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Overhead = &f + case float64: + f := float32(v) + s.Overhead = &f + } + + case "tripped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Tripped = &f + case float64: + f := float32(v) + s.Tripped = &f + } + + } + } + return nil +} + // NewBreaker returns a Breaker. func NewBreaker() *Breaker { r := &Breaker{} diff --git a/typedapi/types/bucketcorrelationaggregation.go b/typedapi/types/bucketcorrelationaggregation.go old mode 100755 new mode 100644 index 6c7f4e7ab5..2c1bbf69ab --- a/typedapi/types/bucketcorrelationaggregation.go +++ b/typedapi/types/bucketcorrelationaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
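// --- Illustrative sketch, not part of the generated diff above. It shows the
// intent of the lenient UnmarshalJSON methods introduced in this change:
// numeric fields such as `compression` may arrive from the server either as a
// JSON number or as a quoted string, and both decode to the same value. The
// payloads are invented for the example; the struct and import path come from
// the diff itself.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var asNumber, asString types.BoxplotAggregation
	// compression supplied as a JSON number
	if err := json.Unmarshal([]byte(`{"compression":100,"field":"load_time"}`), &asNumber); err != nil {
		panic(err)
	}
	// compression supplied as a string; the generated decoder falls back to strconv.ParseFloat
	if err := json.Unmarshal([]byte(`{"compression":"100","field":"load_time"}`), &asString); err != nil {
		panic(err)
	}
	fmt.Println(*asNumber.Compression == *asString.Compression) // true
}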
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,17 +30,18 @@ import ( // BucketCorrelationAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L114-L120 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L114-L120 type BucketCorrelationAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` // Function The correlation function to execute. - Function BucketCorrelationFunction `json:"function"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Function BucketCorrelationFunction `json:"function"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *BucketCorrelationAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -70,9 +71,12 @@ func (s *BucketCorrelationAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/bucketcorrelationfunction.go b/typedapi/types/bucketcorrelationfunction.go old mode 100755 new mode 100644 index d79bb981b3..9eb6d603c0 --- a/typedapi/types/bucketcorrelationfunction.go +++ b/typedapi/types/bucketcorrelationfunction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // BucketCorrelationFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L122-L127 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L122-L127 type BucketCorrelationFunction struct { // CountCorrelation The configuration to calculate a count correlation. This function is designed // for determining the correlation of a term value and a given metric. diff --git a/typedapi/types/bucketcorrelationfunctioncountcorrelation.go b/typedapi/types/bucketcorrelationfunctioncountcorrelation.go old mode 100755 new mode 100644 index 956c299ccc..95b32ba88a --- a/typedapi/types/bucketcorrelationfunctioncountcorrelation.go +++ b/typedapi/types/bucketcorrelationfunctioncountcorrelation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // BucketCorrelationFunctionCountCorrelation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L129-L132 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L129-L132 type BucketCorrelationFunctionCountCorrelation struct { // Indicator The indicator with which to correlate the configured `bucket_path` values. Indicator BucketCorrelationFunctionCountCorrelationIndicator `json:"indicator"` diff --git a/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go b/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go old mode 100755 new mode 100644 index 7953ff4ddd..24655b75e5 --- a/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go +++ b/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BucketCorrelationFunctionCountCorrelationIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L134-L152 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L134-L152 type BucketCorrelationFunctionCountCorrelationIndicator struct { // DocCount The total number of documents that initially created the expectations. It’s // required to be greater @@ -43,6 +53,52 @@ type BucketCorrelationFunctionCountCorrelationIndicator struct { Fractions []Float64 `json:"fractions,omitempty"` } +func (s *BucketCorrelationFunctionCountCorrelationIndicator) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int(v) + s.DocCount = f + } + + case "expectations": + if err := dec.Decode(&s.Expectations); err != nil { + return err + } + + case "fractions": + if err := dec.Decode(&s.Fractions); err != nil { + return err + } + + } + } + return nil +} + // NewBucketCorrelationFunctionCountCorrelationIndicator returns a BucketCorrelationFunctionCountCorrelationIndicator. func NewBucketCorrelationFunctionCountCorrelationIndicator() *BucketCorrelationFunctionCountCorrelationIndicator { r := &BucketCorrelationFunctionCountCorrelationIndicator{} diff --git a/typedapi/types/bucketinfluencer.go b/typedapi/types/bucketinfluencer.go old mode 100755 new mode 100644 index 70539717d9..f287f91259 --- a/typedapi/types/bucketinfluencer.go +++ b/typedapi/types/bucketinfluencer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BucketInfluencer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Bucket.ts#L80-L128 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Bucket.ts#L80-L128 type BucketInfluencer struct { // AnomalyScore A normalized score between 0-100, which is calculated for each bucket // influencer. This score might be updated as @@ -58,6 +68,137 @@ type BucketInfluencer struct { TimestampString DateTime `json:"timestamp_string,omitempty"` } +func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "anomaly_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.AnomalyScore = f + case float64: + f := Float64(v) + s.AnomalyScore = f + } + + case "bucket_span": + if err := dec.Decode(&s.BucketSpan); err != nil { + return err + } + + case "influencer_field_name": + if err := dec.Decode(&s.InfluencerFieldName); err != nil { + return err + } + + case "initial_anomaly_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.InitialAnomalyScore = f + case float64: + f := Float64(v) + s.InitialAnomalyScore = f + } + + case "is_interim": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsInterim = value + case bool: + s.IsInterim = v + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Probability = f + case float64: + f := Float64(v) + s.Probability = f + } + + case "raw_anomaly_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RawAnomalyScore = f + case float64: + f := Float64(v) + s.RawAnomalyScore = f + } + + case "result_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "timestamp_string": + if err := dec.Decode(&s.TimestampString); err != nil { + return err + } + + } + } + return nil +} + // NewBucketInfluencer returns a BucketInfluencer. 
func NewBucketInfluencer() *BucketInfluencer { r := &BucketInfluencer{} diff --git a/typedapi/types/bucketksaggregation.go b/typedapi/types/bucketksaggregation.go old mode 100755 new mode 100644 index 895e069bae..2f0e6b2f61 --- a/typedapi/types/bucketksaggregation.go +++ b/typedapi/types/bucketksaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,7 +30,7 @@ import ( // BucketKsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L79-L112 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L79-L112 type BucketKsAggregation struct { // Alternative A list of string values indicating which K-S test alternative to calculate. // The valid values @@ -50,9 +50,9 @@ type BucketKsAggregation struct { // documents are uniformly distributed on these buckets, which they would be if // one used equal percentiles of a // metric to define the bucket end points. - Fractions []Float64 `json:"fractions,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Fractions []Float64 `json:"fractions,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` // SamplingMethod Indicates the sampling methodology when calculating the K-S test. Note, this // is sampling of the returned values. // This determines the cumulative distribution function (CDF) points used @@ -64,6 +64,7 @@ type BucketKsAggregation struct { } func (s *BucketKsAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -98,14 +99,20 @@ func (s *BucketKsAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "sampling_method": - if err := dec.Decode(&s.SamplingMethod); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SamplingMethod = &o } } diff --git a/typedapi/types/bucketmetricvalueaggregate.go b/typedapi/types/bucketmetricvalueaggregate.go old mode 100755 new mode 100644 index ac39746a1c..e8218433e5 --- a/typedapi/types/bucketmetricvalueaggregate.go +++ b/typedapi/types/bucketmetricvalueaggregate.go @@ -16,20 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // BucketMetricValueAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L232-L235 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L233-L236 type BucketMetricValueAggregate struct { - Keys []string `json:"keys"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Keys []string `json:"keys"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -37,6 +41,49 @@ type BucketMetricValueAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *BucketMetricValueAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "keys": + if err := dec.Decode(&s.Keys); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewBucketMetricValueAggregate returns a BucketMetricValueAggregate. func NewBucketMetricValueAggregate() *BucketMetricValueAggregate { r := &BucketMetricValueAggregate{} diff --git a/typedapi/types/bucketpathaggregation.go b/typedapi/types/bucketpathaggregation.go old mode 100755 new mode 100644 index a832763b25..11f88f90ce --- a/typedapi/types/bucketpathaggregation.go +++ b/typedapi/types/bucketpathaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,15 +30,16 @@ import ( // BucketPathAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L31-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L31-L37 type BucketPathAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *BucketPathAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,9 +64,12 @@ func (s *BucketPathAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/bucketsadjacencymatrixbucket.go b/typedapi/types/bucketsadjacencymatrixbucket.go old mode 100755 new mode 100644 index 35d17ffca8..f00fccf853 --- a/typedapi/types/bucketsadjacencymatrixbucket.go +++ b/typedapi/types/bucketsadjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]AdjacencyMatrixBucket // []AdjacencyMatrixBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsAdjacencyMatrixBucket interface{} diff --git a/typedapi/types/bucketscompositebucket.go b/typedapi/types/bucketscompositebucket.go old mode 100755 new mode 100644 index a2d1ddf46d..3a45b412fd --- a/typedapi/types/bucketscompositebucket.go +++ b/typedapi/types/bucketscompositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]CompositeBucket // []CompositeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsCompositeBucket interface{} diff --git a/typedapi/types/bucketscriptaggregation.go b/typedapi/types/bucketscriptaggregation.go old mode 100755 new mode 100644 index 593d73f8e3..1aee8fa216 --- a/typedapi/types/bucketscriptaggregation.go +++ b/typedapi/types/bucketscriptaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,18 +32,19 @@ import ( // BucketScriptAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L71-L73 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L71-L73 type BucketScriptAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Script Script `json:"script,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Script Script `json:"script,omitempty"` } func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,9 +64,12 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -78,9 +82,12 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "script": if err := dec.Decode(&s.Script); err != nil { diff --git a/typedapi/types/bucketsdatehistogrambucket.go b/typedapi/types/bucketsdatehistogrambucket.go old mode 100755 new mode 100644 index 9f1dc266fa..9eb2748791 --- a/typedapi/types/bucketsdatehistogrambucket.go +++ b/typedapi/types/bucketsdatehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]DateHistogramBucket // []DateHistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsDateHistogramBucket interface{} diff --git a/typedapi/types/bucketsdoubletermsbucket.go b/typedapi/types/bucketsdoubletermsbucket.go old mode 100755 new mode 100644 index 59d3c870e8..765ab6af2f --- a/typedapi/types/bucketsdoubletermsbucket.go +++ b/typedapi/types/bucketsdoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]DoubleTermsBucket // []DoubleTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsDoubleTermsBucket interface{} diff --git a/typedapi/types/bucketselectoraggregation.go b/typedapi/types/bucketselectoraggregation.go old mode 100755 new mode 100644 index 861a201553..ef3376ac10 --- a/typedapi/types/bucketselectoraggregation.go +++ b/typedapi/types/bucketselectoraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,18 +32,19 @@ import ( // BucketSelectorAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L75-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L75-L77 type BucketSelectorAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Script Script `json:"script,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Script Script `json:"script,omitempty"` } func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,9 +64,12 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -78,9 +82,12 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "script": if err := dec.Decode(&s.Script); err != nil { diff --git a/typedapi/types/bucketsfiltersbucket.go b/typedapi/types/bucketsfiltersbucket.go old mode 100755 new mode 100644 index 8c32136a17..0b9bc47d06 --- a/typedapi/types/bucketsfiltersbucket.go +++ b/typedapi/types/bucketsfiltersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]FiltersBucket // []FiltersBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsFiltersBucket interface{} diff --git a/typedapi/indices/exists/response.go b/typedapi/types/bucketsfrequentitemsetsbucket.go old mode 100755 new mode 100644 similarity index 64% rename from typedapi/indices/exists/response.go rename to typedapi/types/bucketsfrequentitemsetsbucket.go index 7b5504ac15..e4477e1002 --- a/typedapi/indices/exists/response.go +++ b/typedapi/types/bucketsfrequentitemsetsbucket.go @@ -16,19 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 -package exists +package types -// Response holds the response body struct for the package exists +// BucketsFrequentItemSetsBucket holds the union for the following types: // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/exists/IndicesExistsResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} +// map[string]FrequentItemSetsBucket +// []FrequentItemSetsBucket +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsFrequentItemSetsBucket interface{} diff --git a/typedapi/types/bucketsgeohashgridbucket.go b/typedapi/types/bucketsgeohashgridbucket.go old mode 100755 new mode 100644 index d67142a234..c97feb64f0 --- a/typedapi/types/bucketsgeohashgridbucket.go +++ b/typedapi/types/bucketsgeohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]GeoHashGridBucket // []GeoHashGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsGeoHashGridBucket interface{} diff --git a/typedapi/types/bucketsgeohexgridbucket.go b/typedapi/types/bucketsgeohexgridbucket.go old mode 100755 new mode 100644 index 858719bf55..39fbf901ca --- a/typedapi/types/bucketsgeohexgridbucket.go +++ b/typedapi/types/bucketsgeohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]GeoHexGridBucket // []GeoHexGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsGeoHexGridBucket interface{} diff --git a/typedapi/types/bucketsgeotilegridbucket.go b/typedapi/types/bucketsgeotilegridbucket.go old mode 100755 new mode 100644 index bd1279a095..21feae899e --- a/typedapi/types/bucketsgeotilegridbucket.go +++ b/typedapi/types/bucketsgeotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]GeoTileGridBucket // []GeoTileGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsGeoTileGridBucket interface{} diff --git a/typedapi/types/bucketshistogrambucket.go b/typedapi/types/bucketshistogrambucket.go old mode 100755 new mode 100644 index 31a731b2d8..fa1dfeafd2 --- a/typedapi/types/bucketshistogrambucket.go +++ b/typedapi/types/bucketshistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]HistogramBucket // []HistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsHistogramBucket interface{} diff --git a/typedapi/types/bucketsipprefixbucket.go b/typedapi/types/bucketsipprefixbucket.go old mode 100755 new mode 100644 index b1f7d1ccb8..e2b9ea76b3 --- a/typedapi/types/bucketsipprefixbucket.go +++ b/typedapi/types/bucketsipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]IpPrefixBucket // []IpPrefixBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsIpPrefixBucket interface{} diff --git a/typedapi/types/bucketsiprangebucket.go b/typedapi/types/bucketsiprangebucket.go old mode 100755 new mode 100644 index f09a53d60b..35ccfc494b --- a/typedapi/types/bucketsiprangebucket.go +++ b/typedapi/types/bucketsiprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]IpRangeBucket // []IpRangeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsIpRangeBucket interface{} diff --git a/typedapi/types/bucketslongraretermsbucket.go b/typedapi/types/bucketslongraretermsbucket.go old mode 100755 new mode 100644 index 35a572e8e8..262dfc3aae --- a/typedapi/types/bucketslongraretermsbucket.go +++ b/typedapi/types/bucketslongraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]LongRareTermsBucket // []LongRareTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsLongRareTermsBucket interface{} diff --git a/typedapi/types/bucketslongtermsbucket.go b/typedapi/types/bucketslongtermsbucket.go old mode 100755 new mode 100644 index 58effa6fa8..732e8dc710 --- a/typedapi/types/bucketslongtermsbucket.go +++ b/typedapi/types/bucketslongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
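// --- Illustrative sketch, not part of the generated diff above. The Buckets*
// types in this block are untyped unions (interface{}), so callers narrow them
// with a type switch over the two documented shapes. bucketCount is a
// hypothetical helper written for this example, not a library function.
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// bucketCount reports how many buckets a BucketsLongTermsBucket union holds,
// whichever of its two documented shapes it carries.
func bucketCount(b types.BucketsLongTermsBucket) int {
	switch v := b.(type) {
	case []types.LongTermsBucket: // list form
		return len(v)
	case map[string]types.LongTermsBucket: // keyed (map) form
		return len(v)
	default:
		return 0
	}
}

func main() {
	var b types.BucketsLongTermsBucket = []types.LongTermsBucket{{}, {}}
	fmt.Println(bucketCount(b)) // 2
}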
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]LongTermsBucket // []LongTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsLongTermsBucket interface{} diff --git a/typedapi/types/bucketsmultitermsbucket.go b/typedapi/types/bucketsmultitermsbucket.go old mode 100755 new mode 100644 index a7f48f75d2..5a64a13b89 --- a/typedapi/types/bucketsmultitermsbucket.go +++ b/typedapi/types/bucketsmultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]MultiTermsBucket // []MultiTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsMultiTermsBucket interface{} diff --git a/typedapi/types/bucketsortaggregation.go b/typedapi/types/bucketsortaggregation.go old mode 100755 new mode 100644 index f1d5752fd3..41e11d0b56 --- a/typedapi/types/bucketsortaggregation.go +++ b/typedapi/types/bucketsortaggregation.go @@ -16,26 +16,118 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/gappolicy" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // BucketSortAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L154-L159 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L154-L159 type BucketSortAggregation struct { - From *int `json:"from,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Size *int `json:"size,omitempty"` - Sort []SortCombinations `json:"sort,omitempty"` + From *int `json:"from,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Size *int `json:"size,omitempty"` + Sort []SortCombinations `json:"sort,omitempty"` +} + +func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "from": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.From = &value + case float64: + f := int(v) + s.From = &f + } + + case "gap_policy": + if err := dec.Decode(&s.GapPolicy); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return err + } + } + + } + } + return nil } // NewBucketSortAggregation returns a BucketSortAggregation. diff --git a/typedapi/types/bucketspath.go b/typedapi/types/bucketspath.go old mode 100755 new mode 100644 index 4ca3eba220..dd83ab5b9b --- a/typedapi/types/bucketspath.go +++ b/typedapi/types/bucketspath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
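// --- Illustrative sketch, not part of the generated diff above. The decoder
// added for BucketSortAggregation accepts `sort` either as a single value or
// as an array, and coerces `from`/`size` from strings when needed. The
// payloads are invented for the example.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var single, many types.BucketSortAggregation
	if err := json.Unmarshal([]byte(`{"sort":"timestamp","size":3}`), &single); err != nil {
		panic(err)
	}
	if err := json.Unmarshal([]byte(`{"sort":["timestamp","rating"],"from":"10"}`), &many); err != nil {
		panic(err)
	}
	fmt.Println(len(single.Sort), len(many.Sort)) // 1 2
	fmt.Println(*many.From, *single.Size)         // 10 3
}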
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ package types // []string // map[string]string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L44-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L44-L50 type BucketsPath interface{} diff --git a/typedapi/types/bucketsquery.go b/typedapi/types/bucketsquery.go old mode 100755 new mode 100644 index 4dd68c0ff8..11c21a86dc --- a/typedapi/types/bucketsquery.go +++ b/typedapi/types/bucketsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]Query // []Query // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsQuery interface{} diff --git a/typedapi/types/bucketsrangebucket.go b/typedapi/types/bucketsrangebucket.go old mode 100755 new mode 100644 index df07440d82..e85c9bd3b4 --- a/typedapi/types/bucketsrangebucket.go +++ b/typedapi/types/bucketsrangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]RangeBucket // []RangeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsRangeBucket interface{} diff --git a/typedapi/types/bucketssignificantlongtermsbucket.go b/typedapi/types/bucketssignificantlongtermsbucket.go old mode 100755 new mode 100644 index 1164be4dde..cada6b4c52 --- a/typedapi/types/bucketssignificantlongtermsbucket.go +++ b/typedapi/types/bucketssignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
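// --- Illustrative sketch, not part of the generated diff above. BucketsPath
// is an untyped union, so any of its three documented shapes (string,
// []string, map[string]string) can be assigned to the `buckets_path` field of
// a pipeline aggregation such as the BucketScriptAggregation shown earlier in
// this diff. The path names are invented for the example.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	paths := []types.BucketsPath{
		"my_metric",                           // single path
		[]string{"my_metric", "other_metric"}, // several paths
		map[string]string{"numerator": "sales", "denominator": "total"}, // named paths
	}
	for _, p := range paths {
		agg := types.BucketScriptAggregation{BucketsPath: p}
		body, err := json.Marshal(agg)
		if err != nil {
			panic(err)
		}
		fmt.Println(string(body))
	}
}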
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]SignificantLongTermsBucket // []SignificantLongTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsSignificantLongTermsBucket interface{} diff --git a/typedapi/types/bucketssignificantstringtermsbucket.go b/typedapi/types/bucketssignificantstringtermsbucket.go old mode 100755 new mode 100644 index d9b10e3c8e..695a869f39 --- a/typedapi/types/bucketssignificantstringtermsbucket.go +++ b/typedapi/types/bucketssignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]SignificantStringTermsBucket // []SignificantStringTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsSignificantStringTermsBucket interface{} diff --git a/typedapi/types/bucketsstringraretermsbucket.go b/typedapi/types/bucketsstringraretermsbucket.go old mode 100755 new mode 100644 index 03a257ae8a..57ac7c02da --- a/typedapi/types/bucketsstringraretermsbucket.go +++ b/typedapi/types/bucketsstringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]StringRareTermsBucket // []StringRareTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsStringRareTermsBucket interface{} diff --git a/typedapi/types/bucketsstringtermsbucket.go b/typedapi/types/bucketsstringtermsbucket.go old mode 100755 new mode 100644 index b403a55682..a27ea249c7 --- a/typedapi/types/bucketsstringtermsbucket.go +++ b/typedapi/types/bucketsstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]StringTermsBucket // []StringTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsStringTermsBucket interface{} diff --git a/typedapi/types/bucketsummary.go b/typedapi/types/bucketsummary.go old mode 100755 new mode 100644 index ca80be81c3..ae85208a49 --- a/typedapi/types/bucketsummary.go +++ b/typedapi/types/bucketsummary.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BucketSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Bucket.ts#L31-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Bucket.ts#L31-L78 type BucketSummary struct { // AnomalyScore The maximum anomaly score, between 0-100, for any of the bucket influencers. // This is an overall, rate-limited @@ -60,6 +70,125 @@ type BucketSummary struct { TimestampString DateTime `json:"timestamp_string,omitempty"` } +func (s *BucketSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "anomaly_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.AnomalyScore = f + case float64: + f := Float64(v) + s.AnomalyScore = f + } + + case "bucket_influencers": + if err := dec.Decode(&s.BucketInfluencers); err != nil { + return err + } + + case "bucket_span": + if err := dec.Decode(&s.BucketSpan); err != nil { + return err + } + + case "event_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EventCount = value + case float64: + f := int64(v) + s.EventCount = f + } + + case "initial_anomaly_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.InitialAnomalyScore = f + case float64: + f := Float64(v) + s.InitialAnomalyScore = f + } + + case "is_interim": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsInterim = value + case bool: + s.IsInterim = v + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "processing_time_ms": + if err := 
dec.Decode(&s.ProcessingTimeMs); err != nil { + return err + } + + case "result_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "timestamp_string": + if err := dec.Decode(&s.TimestampString); err != nil { + return err + } + + } + } + return nil +} + // NewBucketSummary returns a BucketSummary. func NewBucketSummary() *BucketSummary { r := &BucketSummary{} diff --git a/typedapi/types/bucketsvariablewidthhistogrambucket.go b/typedapi/types/bucketsvariablewidthhistogrambucket.go old mode 100755 new mode 100644 index 416cbe14a6..045fa13b04 --- a/typedapi/types/bucketsvariablewidthhistogrambucket.go +++ b/typedapi/types/bucketsvariablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // map[string]VariableWidthHistogramBucket // []VariableWidthHistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsVariableWidthHistogramBucket interface{} diff --git a/typedapi/types/bucketsvoid.go b/typedapi/types/bucketsvoid.go old mode 100755 new mode 100644 index 6125c475b6..2a7136fba1 --- a/typedapi/types/bucketsvoid.go +++ b/typedapi/types/bucketsvoid.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // BucketsVoid holds the union for the following types: // -// map[string]struct{} -// []struct{} +// map[string]interface{} +// []interface{} // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L315-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L316-L325 type BucketsVoid interface{} diff --git a/typedapi/types/buildinformation.go b/typedapi/types/buildinformation.go old mode 100755 new mode 100644 index b371301a69..7e6e6896ec --- a/typedapi/types/buildinformation.go +++ b/typedapi/types/buildinformation.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // BuildInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/info/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/info/types.ts#L24-L27 type BuildInformation struct { Date DateTime `json:"date"` Hash string `json:"hash"` } +func (s *BuildInformation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "date": + if err := dec.Decode(&s.Date); err != nil { + return err + } + + case "hash": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Hash = o + + } + } + return nil +} + // NewBuildInformation returns a BuildInformation. func NewBuildInformation() *BuildInformation { r := &BuildInformation{} diff --git a/typedapi/types/bulkindexbyscrollfailure.go b/typedapi/types/bulkindexbyscrollfailure.go old mode 100755 new mode 100644 index eb262c246e..4ef899da4c --- a/typedapi/types/bulkindexbyscrollfailure.go +++ b/typedapi/types/bulkindexbyscrollfailure.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BulkIndexByScrollFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Errors.ts#L58-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Errors.ts#L58-L64 type BulkIndexByScrollFailure struct { Cause ErrorCause `json:"cause"` Id string `json:"id"` @@ -31,6 +41,65 @@ type BulkIndexByScrollFailure struct { Type string `json:"type"` } +func (s *BulkIndexByScrollFailure) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cause": + if err := dec.Decode(&s.Cause); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "status": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Status = value + case float64: + f := int(v) + s.Status = f + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewBulkIndexByScrollFailure returns a BulkIndexByScrollFailure. func NewBulkIndexByScrollFailure() *BulkIndexByScrollFailure { r := &BulkIndexByScrollFailure{} diff --git a/typedapi/types/bulkstats.go b/typedapi/types/bulkstats.go old mode 100755 new mode 100644 index 95c3015ee7..4f6d3f9b91 --- a/typedapi/types/bulkstats.go +++ b/typedapi/types/bulkstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BulkStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L41-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L41-L51 type BulkStats struct { AvgSize ByteSize `json:"avg_size,omitempty"` AvgSizeInBytes int64 `json:"avg_size_in_bytes"` @@ -35,6 +45,101 @@ type BulkStats struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *BulkStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg_size": + if err := dec.Decode(&s.AvgSize); err != nil { + return err + } + + case "avg_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvgSizeInBytes = value + case float64: + f := int64(v) + s.AvgSizeInBytes = f + } + + case "avg_time": + if err := dec.Decode(&s.AvgTime); err != nil { + return err + } + + case "avg_time_in_millis": + if err := dec.Decode(&s.AvgTimeInMillis); err != nil { + return err + } + + case "total_operations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalOperations = value + case float64: + f := int64(v) + s.TotalOperations = f + } + + case "total_size": + if err := dec.Decode(&s.TotalSize); err != nil { + return err + } + + case "total_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSizeInBytes = value + case float64: + f := int64(v) + s.TotalSizeInBytes = f + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewBulkStats returns a BulkStats. func NewBulkStats() *BulkStats { r := &BulkStats{} diff --git a/typedapi/types/bytenumberproperty.go b/typedapi/types/bytenumberproperty.go old mode 100755 new mode 100644 index 05032b70fb..698a1ea0c5 --- a/typedapi/types/bytenumberproperty.go +++ b/typedapi/types/bytenumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ByteNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L161-L164 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L161-L164 type ByteNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type ByteNumberProperty struct { } func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,28 +435,60 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := 
dec.Decode(&s.Meta); err != nil { return err } @@ -426,6 +504,9 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +794,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +806,39 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/bytesize.go b/typedapi/types/bytesize.go old mode 100755 new mode 100644 index 9d75cd5b08..4877a666d0 --- a/typedapi/types/bytesize.go +++ b/typedapi/types/bytesize.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L88-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L88-L89 type ByteSize interface{} diff --git a/typedapi/types/bytesprocessor.go b/typedapi/types/bytesprocessor.go old mode 100755 new mode 100644 index 63a41b15b9..44f7f545d0 --- a/typedapi/types/bytesprocessor.go +++ b/typedapi/types/bytesprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // BytesProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L123-L127 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L123-L127 type BytesProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type BytesProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *BytesProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewBytesProcessor returns a BytesProcessor. func NewBytesProcessor() *BytesProcessor { r := &BytesProcessor{} diff --git a/typedapi/types/cachequeries.go b/typedapi/types/cachequeries.go old mode 100755 new mode 100644 index fc44d74de8..0727cc0033 --- a/typedapi/types/cachequeries.go +++ b/typedapi/types/cachequeries.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CacheQueries type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L398-L400 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L398-L400 type CacheQueries struct { Enabled bool `json:"enabled"` } +func (s *CacheQueries) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewCacheQueries returns a CacheQueries. func NewCacheQueries() *CacheQueries { r := &CacheQueries{} diff --git a/typedapi/types/cachestats.go b/typedapi/types/cachestats.go old mode 100755 new mode 100644 index 5ce37452cf..e1669115b5 --- a/typedapi/types/cachestats.go +++ b/typedapi/types/cachestats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/stats/types.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/stats/types.ts#L37-L43 type CacheStats struct { Count int `json:"count"` Evictions int `json:"evictions"` @@ -31,6 +41,95 @@ type CacheStats struct { NodeId string `json:"node_id"` } +func (s *CacheStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "evictions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Evictions = value + case float64: + f := int(v) + s.Evictions = f + } + + case "hits": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Hits = value + case float64: + f := int(v) + s.Hits = f + } + + case "misses": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Misses = value + case float64: + f := int(v) + s.Misses = f + } + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + } + } + return nil +} + // NewCacheStats returns a CacheStats. 
func NewCacheStats() *CacheStats { r := &CacheStats{} diff --git a/typedapi/types/calendar.go b/typedapi/types/calendar.go old mode 100755 new mode 100644 index ccfc89b266..09f09942e8 --- a/typedapi/types/calendar.go +++ b/typedapi/types/calendar.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Calendar type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_calendars/types.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_calendars/types.ts#L22-L29 type Calendar struct { // CalendarId A string that uniquely identifies a calendar. CalendarId string `json:"calendar_id"` @@ -32,6 +40,44 @@ type Calendar struct { JobIds []string `json:"job_ids"` } +func (s *Calendar) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "calendar_id": + if err := dec.Decode(&s.CalendarId); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "job_ids": + if err := dec.Decode(&s.JobIds); err != nil { + return err + } + + } + } + return nil +} + // NewCalendar returns a Calendar. func NewCalendar() *Calendar { r := &Calendar{} diff --git a/typedapi/types/calendarevent.go b/typedapi/types/calendarevent.go old mode 100755 new mode 100644 index ecb697b860..67fe61eae5 --- a/typedapi/types/calendarevent.go +++ b/typedapi/types/calendarevent.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // CalendarEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/CalendarEvent.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/CalendarEvent.ts#L23-L33 type CalendarEvent struct { // CalendarId A string that uniquely identifies a calendar. 
CalendarId *string `json:"calendar_id,omitempty"` @@ -37,6 +45,54 @@ type CalendarEvent struct { StartTime DateTime `json:"start_time"` } +func (s *CalendarEvent) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "calendar_id": + if err := dec.Decode(&s.CalendarId); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "end_time": + if err := dec.Decode(&s.EndTime); err != nil { + return err + } + + case "event_id": + if err := dec.Decode(&s.EventId); err != nil { + return err + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + } + } + return nil +} + // NewCalendarEvent returns a CalendarEvent. func NewCalendarEvent() *CalendarEvent { r := &CalendarEvent{} diff --git a/typedapi/types/cardinalityaggregate.go b/typedapi/types/cardinalityaggregate.go old mode 100755 new mode 100644 index f1d52299be..410e89f597 --- a/typedapi/types/cardinalityaggregate.go +++ b/typedapi/types/cardinalityaggregate.go @@ -16,20 +16,66 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // CardinalityAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L137-L140 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L138-L141 type CardinalityAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Value int64 `json:"value"` + Meta Metadata `json:"meta,omitempty"` + Value int64 `json:"value"` +} + +func (s *CardinalityAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Value = value + case float64: + f := int64(v) + s.Value = f + } + + } + } + return nil } // NewCardinalityAggregate returns a CardinalityAggregate. diff --git a/typedapi/types/cardinalityaggregation.go b/typedapi/types/cardinalityaggregation.go old mode 100755 new mode 100644 index cd6f968090..83ad2c219d --- a/typedapi/types/cardinalityaggregation.go +++ b/typedapi/types/cardinalityaggregation.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/cardinalityexecutionmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // CardinalityAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L62-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L62-L66 type CardinalityAggregation struct { ExecutionHint *cardinalityexecutionmode.CardinalityExecutionMode `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` @@ -36,6 +44,76 @@ type CardinalityAggregation struct { Script Script `json:"script,omitempty"` } +func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "execution_hint": + if err := dec.Decode(&s.ExecutionHint); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "precision_threshold": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrecisionThreshold = &value + case float64: + f := int(v) + s.PrecisionThreshold = &f + } + + case "rehash": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Rehash = &value + case bool: + s.Rehash = &v + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewCardinalityAggregation returns a CardinalityAggregation. func NewCardinalityAggregation() *CardinalityAggregation { r := &CardinalityAggregation{} diff --git a/typedapi/types/catanonalydetectorcolumns.go b/typedapi/types/catanonalydetectorcolumns.go old mode 100755 new mode 100644 index 6a92e45d95..89b752b521 --- a/typedapi/types/catanonalydetectorcolumns.go +++ b/typedapi/types/catanonalydetectorcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // CatAnonalyDetectorColumns type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L402-L404 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L402-L404 type CatAnonalyDetectorColumns []catanomalydetectorcolumn.CatAnomalyDetectorColumn diff --git a/typedapi/types/catcomponenttemplate.go b/typedapi/types/catcomponenttemplate.go old mode 100755 new mode 100644 index e3a04706f3..fa00867bb5 --- a/typedapi/types/catcomponenttemplate.go +++ b/typedapi/types/catcomponenttemplate.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CatComponentTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/component_templates/types.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/component_templates/types.ts#L20-L28 type CatComponentTemplate struct { AliasCount string `json:"alias_count"` IncludedIn string `json:"included_in"` diff --git a/typedapi/types/catdatafeedcolumns.go b/typedapi/types/catdatafeedcolumns.go old mode 100755 new mode 100644 index b6c6f7256a..15e0304530 --- a/typedapi/types/catdatafeedcolumns.go +++ b/typedapi/types/catdatafeedcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // CatDatafeedColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L559-L559 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L559-L559 type CatDatafeedColumns []catdatafeedcolumn.CatDatafeedColumn diff --git a/typedapi/types/catdfacolumns.go b/typedapi/types/catdfacolumns.go old mode 100755 new mode 100644 index 1c670c3157..4143c2f238 --- a/typedapi/types/catdfacolumns.go +++ b/typedapi/types/catdfacolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // CatDfaColumns type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L558-L558 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L558-L558 type CatDfaColumns []catdfacolumn.CatDfaColumn diff --git a/typedapi/types/categorizationanalyzer.go b/typedapi/types/categorizationanalyzer.go old mode 100755 new mode 100644 index 446ffbaacd..05a6415b7f --- a/typedapi/types/categorizationanalyzer.go +++ b/typedapi/types/categorizationanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // CategorizationAnalyzerDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L124-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L124-L125 type CategorizationAnalyzer interface{} diff --git a/typedapi/types/categorizationanalyzerdefinition.go b/typedapi/types/categorizationanalyzerdefinition.go old mode 100755 new mode 100644 index 6eb45f2a2a..917b87acd4 --- a/typedapi/types/categorizationanalyzerdefinition.go +++ b/typedapi/types/categorizationanalyzerdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,7 +30,7 @@ import ( // CategorizationAnalyzerDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L127-L140 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L127-L140 type CategorizationAnalyzerDefinition struct { // CharFilter One or more character filters. In addition to the built-in character filters, // other plugins can provide more character filters. 
If this property is not @@ -61,6 +61,7 @@ type CategorizationAnalyzerDefinition struct { } func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -82,6 +83,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { switch rawMsg[0] { case '{': + source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) kind := make(map[string]string, 0) @@ -92,37 +94,37 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { case "html_strip": o := NewHtmlStripCharFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) case "mapping": o := NewMappingCharFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) case "pattern_replace": o := NewPatternReplaceCharFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) case "icu_normalizer": o := NewIcuNormalizationCharFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) case "kuromoji_iteration_mark": o := NewKuromojiIterationMarkCharFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) default: o := new(interface{}) - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) @@ -130,7 +132,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { default: source := bytes.NewReader(rawMsg) o := new(interface{}) - if err := json.NewDecoder(source).Decode(o); err != nil { + if err := json.NewDecoder(source).Decode(&o); err != nil { return err } s.CharFilter = append(s.CharFilter, *o) @@ -145,6 +147,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { switch rawMsg[0] { case '{': + source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) kind := make(map[string]string, 0) @@ -155,295 +158,295 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { case "asciifolding": o := NewAsciiFoldingTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "common_grams": o := NewCommonGramsTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "condition": o := NewConditionTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "delimited_payload": o := NewDelimitedPayloadTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "edge_ngram": o := NewEdgeNGramTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "elision": o := NewElisionTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case 
"fingerprint": o := NewFingerprintTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "hunspell": o := NewHunspellTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "hyphenation_decompounder": o := NewHyphenationDecompounderTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "keep_types": o := NewKeepTypesTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "keep": o := NewKeepWordsTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "keyword_marker": o := NewKeywordMarkerTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "kstem": o := NewKStemTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "length": o := NewLengthTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "limit": o := NewLimitTokenCountTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "lowercase": o := NewLowercaseTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "multiplexer": o := NewMultiplexerTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "ngram": o := NewNGramTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "nori_part_of_speech": o := NewNoriPartOfSpeechTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "pattern_capture": o := NewPatternCaptureTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "pattern_replace": o := NewPatternReplaceTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "porter_stem": o := NewPorterStemTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "predicate_token_filter": o := NewPredicateTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "remove_duplicates": o := NewRemoveDuplicatesTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "reverse": o := NewReverseTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { 
return err } s.Filter = append(s.Filter, *o) case "shingle": o := NewShingleTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "snowball": o := NewSnowballTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "stemmer_override": o := NewStemmerOverrideTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "stemmer": o := NewStemmerTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "stop": o := NewStopTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "synonym_graph": o := NewSynonymGraphTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "synonym": o := NewSynonymTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "trim": o := NewTrimTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "truncate": o := NewTruncateTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "unique": o := NewUniqueTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "uppercase": o := NewUppercaseTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "word_delimiter_graph": o := NewWordDelimiterGraphTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "word_delimiter": o := NewWordDelimiterTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "kuromoji_stemmer": o := NewKuromojiStemmerTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "kuromoji_readingform": o := NewKuromojiReadingFormTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "kuromoji_part_of_speech": o := NewKuromojiPartOfSpeechTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "icu_tokenizer": o := NewIcuTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "icu_collation": o := NewIcuCollationTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "icu_folding": o := NewIcuFoldingTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := 
localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "icu_normalizer": o := NewIcuNormalizationTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "icu_transform": o := NewIcuTransformTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "phonetic": o := NewPhoneticTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) case "dictionary_decompounder": o := NewDictionaryDecompounderTokenFilter() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) default: o := new(interface{}) - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) @@ -451,7 +454,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { default: source := bytes.NewReader(rawMsg) o := new(interface{}) - if err := json.NewDecoder(source).Decode(o); err != nil { + if err := json.NewDecoder(source).Decode(&o); err != nil { return err } s.Filter = append(s.Filter, *o) @@ -459,12 +462,14 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { } case "tokenizer": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': + kind := make(map[string]string, 0) localDec.Decode(&kind) source.Seek(0, io.SeekStart) @@ -473,90 +478,90 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { case "char_group": o := NewCharGroupTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "edge_ngram": o := NewEdgeNGramTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "keyword": o := NewKeywordTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "letter": o := NewLetterTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "lowercase": o := NewLowercaseTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "ngram": o := NewNGramTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "nori_tokenizer": o := NewNoriTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "path_hierarchy": o := NewPathHierarchyTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "standard": o := NewStandardTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "uax_url_email": o := NewUaxEmailUrlTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "whitespace": o := NewWhitespaceTokenizer() - if err := 
localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "kuromoji_tokenizer": o := NewKuromojiTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "pattern": o := NewPatternTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o case "icu_tokenizer": o := NewIcuTokenizer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Tokenizer = *o default: - if err := dec.Decode(&s.Tokenizer); err != nil { + if err := localDec.Decode(&s.Tokenizer); err != nil { return err } } diff --git a/typedapi/types/categorizetextaggregation.go b/typedapi/types/categorizetextaggregation.go old mode 100755 new mode 100644 index d98161f3ed..53f64ff571 --- a/typedapi/types/categorizetextaggregation.go +++ b/typedapi/types/categorizetextaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,12 +25,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // CategorizeTextAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L437-L501 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L437-L501 type CategorizeTextAggregation struct { // CategorizationAnalyzer The categorization analyzer specifies how the text is analyzed and tokenized // before being categorized. @@ -67,8 +69,8 @@ type CategorizeTextAggregation struct { // Smaller values use less memory and create fewer categories. Larger values // will use more memory and // create narrower categories. Max allowed value is 100. - MaxUniqueTokens *int `json:"max_unique_tokens,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + MaxUniqueTokens *int `json:"max_unique_tokens,omitempty"` + Meta Metadata `json:"meta,omitempty"` // MinDocCount The minimum number of documents for a bucket to be returned to the results. 
MinDocCount *int `json:"min_doc_count,omitempty"` Name *string `json:"name,omitempty"` @@ -89,6 +91,7 @@ type CategorizeTextAggregation struct { } func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -103,6 +106,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { switch t { case "categorization_analyzer": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -132,13 +136,35 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { } case "max_matched_tokens": - if err := dec.Decode(&s.MaxMatchedTokens); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxMatchedTokens = &value + case float64: + f := int(v) + s.MaxMatchedTokens = &f } case "max_unique_tokens": - if err := dec.Decode(&s.MaxUniqueTokens); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxUniqueTokens = &value + case float64: + f := int(v) + s.MaxUniqueTokens = &f } case "meta": @@ -147,33 +173,91 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { } case "min_doc_count": - if err := dec.Decode(&s.MinDocCount); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int(v) + s.MinDocCount = &f } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "shard_min_doc_count": - if err := dec.Decode(&s.ShardMinDocCount); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardMinDocCount = &value + case float64: + f := int(v) + s.ShardMinDocCount = &f } case "shard_size": - if err := dec.Decode(&s.ShardSize); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f } case "similarity_threshold": - if err := dec.Decode(&s.SimilarityThreshold); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SimilarityThreshold = &value + case float64: + f := int(v) + s.SimilarityThreshold = &f } case "size": - if err := dec.Decode(&s.Size); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f } } diff --git a/typedapi/types/categorizetextanalyzer.go b/typedapi/types/categorizetextanalyzer.go old mode 100755 new mode 100644 index 9333568575..b3e42a6e49 --- a/typedapi/types/categorizetextanalyzer.go +++ b/typedapi/types/categorizetextanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // CustomCategorizeTextAnalyzer // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L503-L506 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L503-L506 type CategorizeTextAnalyzer interface{} diff --git a/typedapi/types/category.go b/typedapi/types/category.go old mode 100755 new mode 100644 index 0f89947581..a3f154c91d --- a/typedapi/types/category.go +++ b/typedapi/types/category.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Category type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Category.ts#L23-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Category.ts#L23-L49 type Category struct { // CategoryId A unique identifier for the category. category_id is unique at the job level, // even when per-partition categorization is enabled. @@ -68,6 +78,130 @@ type Category struct { Terms string `json:"terms"` } +func (s *Category) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "category_id": + if err := dec.Decode(&s.CategoryId); err != nil { + return err + } + + case "examples": + if err := dec.Decode(&s.Examples); err != nil { + return err + } + + case "grok_pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GrokPattern = &o + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "max_matching_length": + if err := dec.Decode(&s.MaxMatchingLength); err != nil { + return err + } + + case "mlcategory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Mlcategory = o + + case "num_matches": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumMatches = &value + case float64: + f := int64(v) + s.NumMatches = &f + } + + case "p": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.P = &o + + case "partition_field_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PartitionFieldName = &o + + case "partition_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PartitionFieldValue = &o + + case "preferred_to_categories": + if err := dec.Decode(&s.PreferredToCategories); err != nil 
{ + return err + } + + case "regex": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Regex = o + + case "result_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "terms": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Terms = o + + } + } + return nil +} + // NewCategory returns a Category. func NewCategory() *Category { r := &Category{} diff --git a/typedapi/types/cattrainedmodelscolumns.go b/typedapi/types/cattrainedmodelscolumns.go old mode 100755 new mode 100644 index d096cdf148..658a21f215 --- a/typedapi/types/cattrainedmodelscolumns.go +++ b/typedapi/types/cattrainedmodelscolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // CatTrainedModelsColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L636-L638 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L636-L638 type CatTrainedModelsColumns []cattrainedmodelscolumn.CatTrainedModelsColumn diff --git a/typedapi/types/cattransformcolumns.go b/typedapi/types/cattransformcolumns.go old mode 100755 new mode 100644 index 7858ff3309..4a7ebfbb51 --- a/typedapi/types/cattransformcolumns.go +++ b/typedapi/types/cattransformcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // CatTransformColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L845-L845 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L845-L845 type CatTransformColumns []cattransformcolumn.CatTransformColumn diff --git a/typedapi/types/ccr.go b/typedapi/types/ccr.go old mode 100755 new mode 100644 index 879623433d..100b1803e0 --- a/typedapi/types/ccr.go +++ b/typedapi/types/ccr.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Ccr type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L328-L331 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L328-L331 type Ccr struct { AutoFollowPatternsCount int `json:"auto_follow_patterns_count"` Available bool `json:"available"` @@ -30,6 +40,86 @@ type Ccr struct { FollowerIndicesCount int `json:"follower_indices_count"` } +func (s *Ccr) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auto_follow_patterns_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AutoFollowPatternsCount = value + case float64: + f := int(v) + s.AutoFollowPatternsCount = f + } + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "follower_indices_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FollowerIndicesCount = value + case float64: + f := int(v) + s.FollowerIndicesCount = f + } + + } + } + return nil +} + // NewCcr returns a Ccr. func NewCcr() *Ccr { r := &Ccr{} diff --git a/typedapi/types/ccrshardstats.go b/typedapi/types/ccrshardstats.go old mode 100755 new mode 100644 index 53a6153fd6..7ffd601fdc --- a/typedapi/types/ccrshardstats.go +++ b/typedapi/types/ccrshardstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CcrShardStats type. 
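The generated UnmarshalJSON for Ccr above (and for most structs in this change) accepts scalar fields either in their native JSON type or as string-encoded values, falling back to strconv when a string shows up. A minimal, illustrative sketch of what that tolerance looks like from the caller's side; the payload and values are invented, only the field names come from the Ccr struct above, and the import path is the typedapi/types package this diff touches:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    // "available" and "auto_follow_patterns_count" arrive string-encoded here;
    // the generated decoder parses them with strconv, while "enabled" and
    // "follower_indices_count" take the plain JSON bool/number branch.
    raw := []byte(`{
        "available": "true",
        "enabled": false,
        "auto_follow_patterns_count": "2",
        "follower_indices_count": 5
    }`)

    var ccr types.Ccr
    if err := json.Unmarshal(raw, &ccr); err != nil {
        panic(err)
    }

    fmt.Println(ccr.Available, ccr.Enabled)                            // true false
    fmt.Println(ccr.AutoFollowPatternsCount, ccr.FollowerIndicesCount) // 2 5
}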
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/_types/FollowIndexStats.ts#L35-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/_types/FollowIndexStats.ts#L35-L69 type CcrShardStats struct { BytesRead int64 `json:"bytes_read"` FailedReadRequests int64 `json:"failed_read_requests"` @@ -59,6 +69,333 @@ type CcrShardStats struct { WriteBufferSizeInBytes ByteSize `json:"write_buffer_size_in_bytes"` } +func (s *CcrShardStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bytes_read": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BytesRead = value + case float64: + f := int64(v) + s.BytesRead = f + } + + case "failed_read_requests": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FailedReadRequests = value + case float64: + f := int64(v) + s.FailedReadRequests = f + } + + case "failed_write_requests": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FailedWriteRequests = value + case float64: + f := int64(v) + s.FailedWriteRequests = f + } + + case "fatal_exception": + if err := dec.Decode(&s.FatalException); err != nil { + return err + } + + case "follower_aliases_version": + if err := dec.Decode(&s.FollowerAliasesVersion); err != nil { + return err + } + + case "follower_global_checkpoint": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FollowerGlobalCheckpoint = value + case float64: + f := int64(v) + s.FollowerGlobalCheckpoint = f + } + + case "follower_index": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FollowerIndex = o + + case "follower_mapping_version": + if err := dec.Decode(&s.FollowerMappingVersion); err != nil { + return err + } + + case "follower_max_seq_no": + if err := dec.Decode(&s.FollowerMaxSeqNo); err != nil { + return err + } + + case "follower_settings_version": + if err := dec.Decode(&s.FollowerSettingsVersion); err != nil { + return err + } + + case "last_requested_seq_no": + if err := dec.Decode(&s.LastRequestedSeqNo); err != nil { + return err + } + + case "leader_global_checkpoint": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LeaderGlobalCheckpoint = value + case float64: + f := int64(v) + s.LeaderGlobalCheckpoint = f + } + + case "leader_index": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LeaderIndex = o + + case "leader_max_seq_no": + if err := dec.Decode(&s.LeaderMaxSeqNo); err != nil { + return err + } + + case "operations_read": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OperationsRead = value + case 
float64: + f := int64(v) + s.OperationsRead = f + } + + case "operations_written": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OperationsWritten = value + case float64: + f := int64(v) + s.OperationsWritten = f + } + + case "outstanding_read_requests": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OutstandingReadRequests = value + case float64: + f := int(v) + s.OutstandingReadRequests = f + } + + case "outstanding_write_requests": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OutstandingWriteRequests = value + case float64: + f := int(v) + s.OutstandingWriteRequests = f + } + + case "read_exceptions": + if err := dec.Decode(&s.ReadExceptions); err != nil { + return err + } + + case "remote_cluster": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RemoteCluster = o + + case "shard_id": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardId = value + case float64: + f := int(v) + s.ShardId = f + } + + case "successful_read_requests": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SuccessfulReadRequests = value + case float64: + f := int64(v) + s.SuccessfulReadRequests = f + } + + case "successful_write_requests": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SuccessfulWriteRequests = value + case float64: + f := int64(v) + s.SuccessfulWriteRequests = f + } + + case "time_since_last_read": + if err := dec.Decode(&s.TimeSinceLastRead); err != nil { + return err + } + + case "time_since_last_read_millis": + if err := dec.Decode(&s.TimeSinceLastReadMillis); err != nil { + return err + } + + case "total_read_remote_exec_time": + if err := dec.Decode(&s.TotalReadRemoteExecTime); err != nil { + return err + } + + case "total_read_remote_exec_time_millis": + if err := dec.Decode(&s.TotalReadRemoteExecTimeMillis); err != nil { + return err + } + + case "total_read_time": + if err := dec.Decode(&s.TotalReadTime); err != nil { + return err + } + + case "total_read_time_millis": + if err := dec.Decode(&s.TotalReadTimeMillis); err != nil { + return err + } + + case "total_write_time": + if err := dec.Decode(&s.TotalWriteTime); err != nil { + return err + } + + case "total_write_time_millis": + if err := dec.Decode(&s.TotalWriteTimeMillis); err != nil { + return err + } + + case "write_buffer_operation_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.WriteBufferOperationCount = value + case float64: + f := int64(v) + s.WriteBufferOperationCount = f + } + + case "write_buffer_size_in_bytes": + if err := dec.Decode(&s.WriteBufferSizeInBytes); err != nil { + return err + } + + } + } + return nil +} + // NewCcrShardStats returns a CcrShardStats. 
func NewCcrShardStats() *CcrShardStats { r := &CcrShardStats{} diff --git a/typedapi/types/certificateinformation.go b/typedapi/types/certificateinformation.go old mode 100755 new mode 100644 index f9d0565b6b..0333f1ae29 --- a/typedapi/types/certificateinformation.go +++ b/typedapi/types/certificateinformation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CertificateInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ssl/certificates/types.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ssl/certificates/types.ts#L22-L31 type CertificateInformation struct { Alias string `json:"alias,omitempty"` Expiry DateTime `json:"expiry"` @@ -34,6 +44,93 @@ type CertificateInformation struct { SubjectDn string `json:"subject_dn"` } +func (s *CertificateInformation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alias": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Alias = o + + case "expiry": + if err := dec.Decode(&s.Expiry); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = o + + case "has_private_key": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HasPrivateKey = value + case bool: + s.HasPrivateKey = v + } + + case "issuer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Issuer = &o + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = o + + case "serial_number": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SerialNumber = o + + case "subject_dn": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SubjectDn = o + + } + } + return nil +} + // NewCertificateInformation returns a CertificateInformation. func NewCertificateInformation() *CertificateInformation { r := &CertificateInformation{} diff --git a/typedapi/types/cgroup.go b/typedapi/types/cgroup.go old mode 100755 new mode 100644 index 10bb216b25..1f570e0542 --- a/typedapi/types/cgroup.go +++ b/typedapi/types/cgroup.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Cgroup type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L188-L192 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L188-L192 type Cgroup struct { Cpu *CgroupCpu `json:"cpu,omitempty"` Cpuacct *CpuAcct `json:"cpuacct,omitempty"` diff --git a/typedapi/types/cgroupcpu.go b/typedapi/types/cgroupcpu.go old mode 100755 new mode 100644 index b2a6bbbc8a..bef3bc8a85 --- a/typedapi/types/cgroupcpu.go +++ b/typedapi/types/cgroupcpu.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CgroupCpu type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L199-L204 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L199-L204 type CgroupCpu struct { CfsPeriodMicros *int `json:"cfs_period_micros,omitempty"` CfsQuotaMicros *int `json:"cfs_quota_micros,omitempty"` @@ -30,6 +40,71 @@ type CgroupCpu struct { Stat *CgroupCpuStat `json:"stat,omitempty"` } +func (s *CgroupCpu) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cfs_period_micros": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CfsPeriodMicros = &value + case float64: + f := int(v) + s.CfsPeriodMicros = &f + } + + case "cfs_quota_micros": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CfsQuotaMicros = &value + case float64: + f := int(v) + s.CfsQuotaMicros = &f + } + + case "control_group": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ControlGroup = &o + + case "stat": + if err := dec.Decode(&s.Stat); err != nil { + return err + } + + } + } + return nil +} + // NewCgroupCpu returns a CgroupCpu. func NewCgroupCpu() *CgroupCpu { r := &CgroupCpu{} diff --git a/typedapi/types/cgroupcpustat.go b/typedapi/types/cgroupcpustat.go old mode 100755 new mode 100644 index e67ffa9744..6767810c58 --- a/typedapi/types/cgroupcpustat.go +++ b/typedapi/types/cgroupcpustat.go @@ -16,19 +16,84 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CgroupCpuStat type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L206-L210 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L206-L210 type CgroupCpuStat struct { NumberOfElapsedPeriods *int64 `json:"number_of_elapsed_periods,omitempty"` NumberOfTimesThrottled *int64 `json:"number_of_times_throttled,omitempty"` TimeThrottledNanos *int64 `json:"time_throttled_nanos,omitempty"` } +func (s *CgroupCpuStat) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "number_of_elapsed_periods": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumberOfElapsedPeriods = &value + case float64: + f := int64(v) + s.NumberOfElapsedPeriods = &f + } + + case "number_of_times_throttled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumberOfTimesThrottled = &value + case float64: + f := int64(v) + s.NumberOfTimesThrottled = &f + } + + case "time_throttled_nanos": + if err := dec.Decode(&s.TimeThrottledNanos); err != nil { + return err + } + + } + } + return nil +} + // NewCgroupCpuStat returns a CgroupCpuStat. func NewCgroupCpuStat() *CgroupCpuStat { r := &CgroupCpuStat{} diff --git a/typedapi/types/cgroupmemory.go b/typedapi/types/cgroupmemory.go old mode 100755 new mode 100644 index bdab81fb0e..2e25bf5b26 --- a/typedapi/types/cgroupmemory.go +++ b/typedapi/types/cgroupmemory.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CgroupMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L212-L216 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L212-L216 type CgroupMemory struct { ControlGroup *string `json:"control_group,omitempty"` LimitInBytes *string `json:"limit_in_bytes,omitempty"` diff --git a/typedapi/types/chaininput.go b/typedapi/types/chaininput.go old mode 100755 new mode 100644 index 5354c50bf7..b9c55b86ca --- a/typedapi/types/chaininput.go +++ b/typedapi/types/chaininput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ChainInput type. 
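The CgroupCpu and CgroupCpuStat decoders above also show how optional fields are handled: values that are present get parsed (string or number) and assigned through a pointer, absent fields stay nil, and nested objects such as "stat" are delegated to the nested type's own decoder. A small sketch under the same assumptions as before (invented payload, real field names):

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    raw := []byte(`{
        "cfs_period_micros": "100000",
        "cfs_quota_micros": -1,
        "stat": {"number_of_times_throttled": 3}
    }`)

    var cpu types.CgroupCpu
    if err := json.Unmarshal(raw, &cpu); err != nil {
        panic(err)
    }

    fmt.Println(*cpu.CfsPeriodMicros, *cpu.CfsQuotaMicros) // 100000 -1
    fmt.Println(cpu.ControlGroup == nil)                   // true: field was absent, pointer stays nil
    fmt.Println(*cpu.Stat.NumberOfTimesThrottled)          // 3, via CgroupCpuStat's own UnmarshalJSON
}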
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L35-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L35-L37 type ChainInput struct { Inputs []map[string]WatcherInput `json:"inputs"` } diff --git a/typedapi/types/charfilter.go b/typedapi/types/charfilter.go old mode 100755 new mode 100644 index 69e01bef3e..1bd0946f1b --- a/typedapi/types/charfilter.go +++ b/typedapi/types/charfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // CharFilterDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/char_filters.ts#L28-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/char_filters.ts#L28-L30 type CharFilter interface{} diff --git a/typedapi/types/charfilterdefinition.go b/typedapi/types/charfilterdefinition.go old mode 100755 new mode 100644 index 3342e7f8c2..7e7669d012 --- a/typedapi/types/charfilterdefinition.go +++ b/typedapi/types/charfilterdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,5 +28,5 @@ package types // IcuNormalizationCharFilter // KuromojiIterationMarkCharFilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/char_filters.ts#L32-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/char_filters.ts#L32-L41 type CharFilterDefinition interface{} diff --git a/typedapi/types/charfilterdetail.go b/typedapi/types/charfilterdetail.go old mode 100755 new mode 100644 index 074948a2a2..672457a038 --- a/typedapi/types/charfilterdetail.go +++ b/typedapi/types/charfilterdetail.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CharFilterDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L46-L49 type CharFilterDetail struct { FilteredText []string `json:"filtered_text"` Name string `json:"name"` diff --git a/typedapi/types/charfiltertypes.go b/typedapi/types/charfiltertypes.go old mode 100755 new mode 100644 index f4e0ca39a7..07fdfd2d1c --- a/typedapi/types/charfiltertypes.go +++ b/typedapi/types/charfiltertypes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CharFilterTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L133-L142 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L133-L142 type CharFilterTypes struct { AnalyzerTypes []FieldTypes `json:"analyzer_types"` BuiltInAnalyzers []FieldTypes `json:"built_in_analyzers"` diff --git a/typedapi/types/chargrouptokenizer.go b/typedapi/types/chargrouptokenizer.go old mode 100755 new mode 100644 index ba015c0326..1ea4a2c483 --- a/typedapi/types/chargrouptokenizer.go +++ b/typedapi/types/chargrouptokenizer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CharGroupTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L55-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L55-L59 type CharGroupTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` TokenizeOnChars []string `json:"tokenize_on_chars"` @@ -30,6 +40,57 @@ type CharGroupTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *CharGroupTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_token_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTokenLength = &value + case float64: + f := int(v) + s.MaxTokenLength = &f + } + + case "tokenize_on_chars": + if err := dec.Decode(&s.TokenizeOnChars); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewCharGroupTokenizer returns a CharGroupTokenizer. 
func NewCharGroupTokenizer() *CharGroupTokenizer { r := &CharGroupTokenizer{} diff --git a/typedapi/types/checkpointing.go b/typedapi/types/checkpointing.go old mode 100755 new mode 100644 index 5e221de1c8..99b092ae0c --- a/typedapi/types/checkpointing.go +++ b/typedapi/types/checkpointing.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Checkpointing type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/types.ts#L82-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/types.ts#L82-L89 type Checkpointing struct { ChangesLastDetectedAt *int64 `json:"changes_last_detected_at,omitempty"` ChangesLastDetectedAtDateTime DateTime `json:"changes_last_detected_at_date_time,omitempty"` @@ -32,6 +42,86 @@ type Checkpointing struct { OperationsBehind *int64 `json:"operations_behind,omitempty"` } +func (s *Checkpointing) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "changes_last_detected_at": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ChangesLastDetectedAt = &value + case float64: + f := int64(v) + s.ChangesLastDetectedAt = &f + } + + case "changes_last_detected_at_date_time": + if err := dec.Decode(&s.ChangesLastDetectedAtDateTime); err != nil { + return err + } + + case "last": + if err := dec.Decode(&s.Last); err != nil { + return err + } + + case "last_search_time": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LastSearchTime = &value + case float64: + f := int64(v) + s.LastSearchTime = &f + } + + case "next": + if err := dec.Decode(&s.Next); err != nil { + return err + } + + case "operations_behind": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OperationsBehind = &value + case float64: + f := int64(v) + s.OperationsBehind = &f + } + + } + } + return nil +} + // NewCheckpointing returns a Checkpointing. func NewCheckpointing() *Checkpointing { r := &Checkpointing{} diff --git a/typedapi/types/checkpointstats.go b/typedapi/types/checkpointstats.go old mode 100755 new mode 100644 index 5a040003c4..88c0621b6c --- a/typedapi/types/checkpointstats.go +++ b/typedapi/types/checkpointstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CheckpointStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/types.ts#L73-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/types.ts#L73-L80 type CheckpointStats struct { Checkpoint int64 `json:"checkpoint"` CheckpointProgress *TransformProgress `json:"checkpoint_progress,omitempty"` @@ -32,6 +42,66 @@ type CheckpointStats struct { TimestampMillis *int64 `json:"timestamp_millis,omitempty"` } +func (s *CheckpointStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "checkpoint": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Checkpoint = value + case float64: + f := int64(v) + s.Checkpoint = f + } + + case "checkpoint_progress": + if err := dec.Decode(&s.CheckpointProgress); err != nil { + return err + } + + case "time_upper_bound": + if err := dec.Decode(&s.TimeUpperBound); err != nil { + return err + } + + case "time_upper_bound_millis": + if err := dec.Decode(&s.TimeUpperBoundMillis); err != nil { + return err + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "timestamp_millis": + if err := dec.Decode(&s.TimestampMillis); err != nil { + return err + } + + } + } + return nil +} + // NewCheckpointStats returns a CheckpointStats. func NewCheckpointStats() *CheckpointStats { r := &CheckpointStats{} diff --git a/typedapi/types/childrenaggregate.go b/typedapi/types/childrenaggregate.go old mode 100755 new mode 100644 index 74b2e73c56..b874ae5b4a --- a/typedapi/types/childrenaggregate.go +++ b/typedapi/types/childrenaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // ChildrenAggregate type. 
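The Checkpointing and CheckpointStats decoders above also mark where the leniency stops: only fields the generator treats as plain integers (checkpoint, operations_behind, and so on) go through the string-tolerant switch, while fields handed straight to dec.Decode (for example timestamp_millis) accept only their native JSON representation. A sketch of that difference, with an invented payload:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    // "checkpoint" tolerates a string; "timestamp_millis" is decoded directly
    // and must therefore be a JSON number.
    raw := []byte(`{"checkpoint": "77", "timestamp_millis": 1700000000000}`)

    var cs types.CheckpointStats
    if err := json.Unmarshal(raw, &cs); err != nil {
        panic(err)
    }
    fmt.Println(cs.Checkpoint, *cs.TimestampMillis) // 77 1700000000000

    // A string-encoded timestamp_millis, by contrast, is rejected.
    bad := []byte(`{"timestamp_millis": "1700000000000"}`)
    fmt.Println(json.Unmarshal(bad, &types.CheckpointStats{}) != nil) // true: decode error
}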
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L767-L768 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L776-L777 type ChildrenAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err 
!= nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err 
:= dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := 
NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s ChildrenAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/childrenaggregation.go b/typedapi/types/childrenaggregation.go old mode 100755 new mode 100644 index 5a03797f12..9d1f72a0b2 --- a/typedapi/types/childrenaggregation.go +++ b/typedapi/types/childrenaggregation.go @@ -16,21 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ChildrenAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L73-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L73-L75 type ChildrenAggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` +} + +func (s *ChildrenAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil } // NewChildrenAggregation returns a ChildrenAggregation. diff --git a/typedapi/types/chisquareheuristic.go b/typedapi/types/chisquareheuristic.go old mode 100755 new mode 100644 index 2f18280823..540ca35e97 --- a/typedapi/types/chisquareheuristic.go +++ b/typedapi/types/chisquareheuristic.go @@ -16,18 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
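The rewritten ChildrenAggregate decoder above no longer walks a dedicated "aggregations" branch: any top-level key of the form "<type>#<name>" is routed, in the default case, to the matching aggregate constructor and stored in the lazily allocated Aggregations map under the caller-chosen name, and Meta now uses the shared Metadata type instead of a raw map[string]json.RawMessage. A sketch of the resulting behaviour; the bucket content and the "max_price" name are invented, while the key format and types come from the code above:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    // A children bucket as Elasticsearch returns it: sub-aggregations are
    // keyed "<type>#<name>", here a max aggregation the caller named "max_price".
    raw := []byte(`{
        "doc_count": 42,
        "max#max_price": {"value": 199.0}
    }`)

    var children types.ChildrenAggregate
    if err := json.Unmarshal(raw, &children); err != nil {
        panic(err)
    }

    fmt.Println(children.DocCount) // 42

    // The "max#" prefix is stripped; the entry lands under "max_price".
    agg, ok := children.Aggregations["max_price"]
    fmt.Printf("%T %v\n", agg, ok) // *types.MaxAggregate true
}

On the marshalling side, the same change drops the stray "Aggregations" key from the flattened map (delete(tmp, "Aggregations")) before the bucket is re-encoded.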
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ChiSquareHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L322-L325 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L322-L325 type ChiSquareHeuristic struct { BackgroundIsSuperset bool `json:"background_is_superset"` IncludeNegatives bool `json:"include_negatives"` } +func (s *ChiSquareHeuristic) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "background_is_superset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.BackgroundIsSuperset = value + case bool: + s.BackgroundIsSuperset = v + } + + case "include_negatives": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeNegatives = value + case bool: + s.IncludeNegatives = v + } + + } + } + return nil +} + // NewChiSquareHeuristic returns a ChiSquareHeuristic. func NewChiSquareHeuristic() *ChiSquareHeuristic { r := &ChiSquareHeuristic{} diff --git a/typedapi/types/chunkingconfig.go b/typedapi/types/chunkingconfig.go old mode 100755 new mode 100644 index a8a1b9bf2f..52872e66dd --- a/typedapi/types/chunkingconfig.go +++ b/typedapi/types/chunkingconfig.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/chunkingmode" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ChunkingConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L177-L190 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L177-L190 type ChunkingConfig struct { // Mode If the mode is `auto`, the chunk size is dynamically calculated; // this is the recommended value when the datafeed does not use aggregations. @@ -40,6 +46,36 @@ type ChunkingConfig struct { TimeSpan Duration `json:"time_span,omitempty"` } +func (s *ChunkingConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "time_span": + if err := dec.Decode(&s.TimeSpan); err != nil { + return err + } + + } + } + return nil +} + // NewChunkingConfig returns a ChunkingConfig. 
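ChiSquareHeuristic is one of the simplest illustrations of the leniency these new decoders add: boolean fields are accepted either as JSON booleans or as strings, via the strconv.ParseBool branch above. A small, self-contained sketch (the field values are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var h types.ChiSquareHeuristic
	// "true" is deliberately a string here to exercise the ParseBool path.
	data := []byte(`{"background_is_superset":"true","include_negatives":false}`)
	if err := json.Unmarshal(data, &h); err != nil {
		panic(err)
	}
	fmt.Println(h.BackgroundIsSuperset, h.IncludeNegatives) // true false
}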
func NewChunkingConfig() *ChunkingConfig { r := &ChunkingConfig{} diff --git a/typedapi/types/circleprocessor.go b/typedapi/types/circleprocessor.go old mode 100755 new mode 100644 index 3269210217..0e90388c21 --- a/typedapi/types/circleprocessor.go +++ b/typedapi/types/circleprocessor.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/shapetype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // CircleProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L129-L135 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L129-L135 type CircleProcessor struct { Description *string `json:"description,omitempty"` ErrorDistance Float64 `json:"error_distance"` @@ -40,6 +48,114 @@ type CircleProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *CircleProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "error_distance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ErrorDistance = f + case float64: + f := Float64(v) + s.ErrorDistance = f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "shape_type": + if err := dec.Decode(&s.ShapeType); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewCircleProcessor returns a CircleProcessor. 
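The CircleProcessor decoder applies the same idea to floating-point fields: `error_distance` may arrive as a JSON number or as a quoted string and is parsed with strconv.ParseFloat. A rough usage sketch with illustrative values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var p types.CircleProcessor
	// error_distance is sent as a string on purpose; the values are made up.
	data := []byte(`{"field":"circle","error_distance":"28.0"}`)
	if err := json.Unmarshal(data, &p); err != nil {
		panic(err)
	}
	fmt.Printf("field=%v error_distance=%v\n", p.Field, p.ErrorDistance) // field=circle error_distance=28
}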
func NewCircleProcessor() *CircleProcessor { r := &CircleProcessor{} diff --git a/typedapi/types/classificationinferenceoptions.go b/typedapi/types/classificationinferenceoptions.go old mode 100755 new mode 100644 index 7855bf92fb..6e2f94b76d --- a/typedapi/types/classificationinferenceoptions.go +++ b/typedapi/types/classificationinferenceoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L80-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L80-L95 type ClassificationInferenceOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -40,6 +50,82 @@ type ClassificationInferenceOptions struct { TopClassesResultsField *string `json:"top_classes_results_field,omitempty"` } +func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "prediction_field_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PredictionFieldType = &o + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "top_classes_results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TopClassesResultsField = &o + + } + } + return nil +} + // NewClassificationInferenceOptions returns a ClassificationInferenceOptions. func NewClassificationInferenceOptions() *ClassificationInferenceOptions { r := &ClassificationInferenceOptions{} diff --git a/typedapi/types/cleanuprepositoryresults.go b/typedapi/types/cleanuprepositoryresults.go old mode 100755 new mode 100644 index 3a7d27184a..e8483ab04d --- a/typedapi/types/cleanuprepositoryresults.go +++ b/typedapi/types/cleanuprepositoryresults.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CleanupRepositoryResults type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L29-L34 type CleanupRepositoryResults struct { // DeletedBlobs Number of binary large objects (blobs) removed during cleanup. DeletedBlobs int64 `json:"deleted_blobs"` @@ -30,6 +40,56 @@ type CleanupRepositoryResults struct { DeletedBytes int64 `json:"deleted_bytes"` } +func (s *CleanupRepositoryResults) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "deleted_blobs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DeletedBlobs = value + case float64: + f := int64(v) + s.DeletedBlobs = f + } + + case "deleted_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DeletedBytes = value + case float64: + f := int64(v) + s.DeletedBytes = f + } + + } + } + return nil +} + // NewCleanupRepositoryResults returns a CleanupRepositoryResults. func NewCleanupRepositoryResults() *CleanupRepositoryResults { r := &CleanupRepositoryResults{} diff --git a/typedapi/types/client.go b/typedapi/types/client.go old mode 100755 new mode 100644 index df366eac89..2b04f89f15 --- a/typedapi/types/client.go +++ b/typedapi/types/client.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Client type. 
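CleanupRepositoryResults shows the int64 variant of the pattern: `deleted_blobs` and `deleted_bytes` are read from either a JSON number or a string via strconv.ParseInt. A minimal sketch with made-up counts:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var r types.CleanupRepositoryResults
	// deleted_bytes is a string here to exercise the ParseInt branch; values are illustrative.
	data := []byte(`{"deleted_blobs":0,"deleted_bytes":"20"}`)
	if err := json.Unmarshal(data, &r); err != nil {
		panic(err)
	}
	fmt.Println(r.DeletedBlobs, r.DeletedBytes) // 0 20
}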
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L272-L284 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L272-L284 type Client struct { Agent *string `json:"agent,omitempty"` ClosedTimeMillis *int64 `json:"closed_time_millis,omitempty"` @@ -37,6 +47,156 @@ type Client struct { XOpaqueId *string `json:"x_opaque_id,omitempty"` } +func (s *Client) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "agent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Agent = &o + + case "closed_time_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ClosedTimeMillis = &value + case float64: + f := int64(v) + s.ClosedTimeMillis = &f + } + + case "id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Id = &value + case float64: + f := int64(v) + s.Id = &f + } + + case "last_request_time_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LastRequestTimeMillis = &value + case float64: + f := int64(v) + s.LastRequestTimeMillis = &f + } + + case "last_uri": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LastUri = &o + + case "local_address": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LocalAddress = &o + + case "opened_time_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OpenedTimeMillis = &value + case float64: + f := int64(v) + s.OpenedTimeMillis = &f + } + + case "remote_address": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RemoteAddress = &o + + case "request_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RequestCount = &value + case float64: + f := int64(v) + s.RequestCount = &f + } + + case "request_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RequestSizeBytes = &value + case float64: + f := int64(v) + s.RequestSizeBytes = &f + } + + case "x_opaque_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.XOpaqueId = &o + + } + } + return nil +} + // NewClient returns a Client. func NewClient() *Client { r := &Client{} diff --git a/typedapi/types/closeindexresult.go b/typedapi/types/closeindexresult.go old mode 100755 new mode 100644 index 8337e446c3..37e209738c --- a/typedapi/types/closeindexresult.go +++ b/typedapi/types/closeindexresult.go @@ -16,18 +16,70 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CloseIndexResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/close/CloseIndexResponse.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/close/CloseIndexResponse.ts#L32-L35 type CloseIndexResult struct { Closed bool `json:"closed"` Shards map[string]CloseShardResult `json:"shards,omitempty"` } +func (s *CloseIndexResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "closed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Closed = value + case bool: + s.Closed = v + } + + case "shards": + if s.Shards == nil { + s.Shards = make(map[string]CloseShardResult, 0) + } + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + } + } + return nil +} + // NewCloseIndexResult returns a CloseIndexResult. func NewCloseIndexResult() *CloseIndexResult { r := &CloseIndexResult{ diff --git a/typedapi/types/closeshardresult.go b/typedapi/types/closeshardresult.go old mode 100755 new mode 100644 index 58e0268f1a..0a317abf38 --- a/typedapi/types/closeshardresult.go +++ b/typedapi/types/closeshardresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CloseShardResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/close/CloseIndexResponse.ts#L37-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/close/CloseIndexResponse.ts#L37-L39 type CloseShardResult struct { Failures []ShardFailure `json:"failures"` } diff --git a/typedapi/types/clusterappliedstats.go b/typedapi/types/clusterappliedstats.go old mode 100755 new mode 100644 index b93af4db83..134e083a9d --- a/typedapi/types/clusterappliedstats.go +++ b/typedapi/types/clusterappliedstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ClusterAppliedStats type. 
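CloseIndexResult gains a decoder that also allocates the `shards` map before decoding, so per-shard failures can be read without a nil check. A small sketch of decoding one index entry from a close-index response (the shard id and payload are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var r types.CloseIndexResult
	// Illustrative per-index fragment of a close-index response.
	data := []byte(`{"closed":true,"shards":{"0":{"failures":[]}}}`)
	if err := json.Unmarshal(data, &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Closed, len(r.Shards["0"].Failures)) // true 0
}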
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L90-L92 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L90-L92 type ClusterAppliedStats struct { Recordings []Recording `json:"recordings,omitempty"` } diff --git a/typedapi/types/clustercomponenttemplate.go b/typedapi/types/clustercomponenttemplate.go old mode 100755 new mode 100644 index 683beaf07e..56da9616cb --- a/typedapi/types/clustercomponenttemplate.go +++ b/typedapi/types/clustercomponenttemplate.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ClusterComponentTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/_types/ComponentTemplate.ts#L26-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/_types/ComponentTemplate.ts#L26-L29 type ClusterComponentTemplate struct { ComponentTemplate ComponentTemplateNode `json:"component_template"` Name string `json:"name"` } +func (s *ClusterComponentTemplate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "component_template": + if err := dec.Decode(&s.ComponentTemplate); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewClusterComponentTemplate returns a ClusterComponentTemplate. func NewClusterComponentTemplate() *ClusterComponentTemplate { r := &ClusterComponentTemplate{} diff --git a/typedapi/types/clusterfilesystem.go b/typedapi/types/clusterfilesystem.go old mode 100755 new mode 100644 index ba10601046..74627dc8e1 --- a/typedapi/types/clusterfilesystem.go +++ b/typedapi/types/clusterfilesystem.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterFileSystem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L34-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L34-L38 type ClusterFileSystem struct { AvailableInBytes int64 `json:"available_in_bytes"` FreeInBytes int64 `json:"free_in_bytes"` TotalInBytes int64 `json:"total_in_bytes"` } +func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvailableInBytes = value + case float64: + f := int64(v) + s.AvailableInBytes = f + } + + case "free_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeInBytes = value + case float64: + f := int64(v) + s.FreeInBytes = f + } + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = value + case float64: + f := int64(v) + s.TotalInBytes = f + } + + } + } + return nil +} + // NewClusterFileSystem returns a ClusterFileSystem. func NewClusterFileSystem() *ClusterFileSystem { r := &ClusterFileSystem{} diff --git a/typedapi/types/clusterindexingpressure.go b/typedapi/types/clusterindexingpressure.go old mode 100755 new mode 100644 index d261948502..dcaca43c8c --- a/typedapi/types/clusterindexingpressure.go +++ b/typedapi/types/clusterindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ClusterIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L299-L301 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L299-L301 type ClusterIndexingPressure struct { Memory ClusterPressureMemory `json:"memory"` } diff --git a/typedapi/types/clusterindices.go b/typedapi/types/clusterindices.go old mode 100755 new mode 100644 index 0d9a8cbfbe..b8ce9e9e6e --- a/typedapi/types/clusterindices.go +++ b/typedapi/types/clusterindices.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterIndices type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L63-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L63-L94 type ClusterIndices struct { // Analysis Contains statistics about analyzers and analyzer components used in selected // nodes. @@ -48,6 +58,91 @@ type ClusterIndices struct { Versions []IndicesVersions `json:"versions,omitempty"` } +func (s *ClusterIndices) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analysis": + if err := dec.Decode(&s.Analysis); err != nil { + return err + } + + case "completion": + if err := dec.Decode(&s.Completion); err != nil { + return err + } + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "docs": + if err := dec.Decode(&s.Docs); err != nil { + return err + } + + case "fielddata": + if err := dec.Decode(&s.Fielddata); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "query_cache": + if err := dec.Decode(&s.QueryCache); err != nil { + return err + } + + case "segments": + if err := dec.Decode(&s.Segments); err != nil { + return err + } + + case "shards": + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "store": + if err := dec.Decode(&s.Store); err != nil { + return err + } + + case "versions": + if err := dec.Decode(&s.Versions); err != nil { + return err + } + + } + } + return nil +} + // NewClusterIndices returns a ClusterIndices. func NewClusterIndices() *ClusterIndices { r := &ClusterIndices{} diff --git a/typedapi/types/clusterindicesshards.go b/typedapi/types/clusterindicesshards.go old mode 100755 new mode 100644 index 10e86e5512..99c36cfe27 --- a/typedapi/types/clusterindicesshards.go +++ b/typedapi/types/clusterindicesshards.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterIndicesShards type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L49-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L49-L61 type ClusterIndicesShards struct { // Index Contains statistics about shards assigned to selected nodes. 
Index *ClusterIndicesShardsIndex `json:"index,omitempty"` @@ -34,6 +44,79 @@ type ClusterIndicesShards struct { Total *Float64 `json:"total,omitempty"` } +func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "primaries": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Primaries = &f + case float64: + f := Float64(v) + s.Primaries = &f + } + + case "replication": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Replication = &f + case float64: + f := Float64(v) + s.Replication = &f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Total = &f + case float64: + f := Float64(v) + s.Total = &f + } + + } + } + return nil +} + // NewClusterIndicesShards returns a ClusterIndicesShards. func NewClusterIndicesShards() *ClusterIndicesShards { r := &ClusterIndicesShards{} diff --git a/typedapi/types/clusterindicesshardsindex.go b/typedapi/types/clusterindicesshardsindex.go old mode 100755 new mode 100644 index 251d7850b5..2f411b37bd --- a/typedapi/types/clusterindicesshardsindex.go +++ b/typedapi/types/clusterindicesshardsindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ClusterIndicesShardsIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L40-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L40-L47 type ClusterIndicesShardsIndex struct { // Primaries Contains statistics about the number of primary shards assigned to selected // nodes. diff --git a/typedapi/types/clusterinfo.go b/typedapi/types/clusterinfo.go old mode 100755 new mode 100644 index 1c826e6bbb..1a2b0a1a63 --- a/typedapi/types/clusterinfo.go +++ b/typedapi/types/clusterinfo.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ClusterInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L48-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L48-L54 type ClusterInfo struct { Nodes map[string]NodeDiskUsage `json:"nodes"` ReservedSizes []ReservedSize `json:"reserved_sizes"` @@ -31,6 +39,63 @@ type ClusterInfo struct { ShardSizes map[string]int64 `json:"shard_sizes"` } +func (s *ClusterInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "nodes": + if s.Nodes == nil { + s.Nodes = make(map[string]NodeDiskUsage, 0) + } + if err := dec.Decode(&s.Nodes); err != nil { + return err + } + + case "reserved_sizes": + if err := dec.Decode(&s.ReservedSizes); err != nil { + return err + } + + case "shard_data_set_sizes": + if s.ShardDataSetSizes == nil { + s.ShardDataSetSizes = make(map[string]string, 0) + } + if err := dec.Decode(&s.ShardDataSetSizes); err != nil { + return err + } + + case "shard_paths": + if s.ShardPaths == nil { + s.ShardPaths = make(map[string]string, 0) + } + if err := dec.Decode(&s.ShardPaths); err != nil { + return err + } + + case "shard_sizes": + if s.ShardSizes == nil { + s.ShardSizes = make(map[string]int64, 0) + } + if err := dec.Decode(&s.ShardSizes); err != nil { + return err + } + + } + } + return nil +} + // NewClusterInfo returns a ClusterInfo. func NewClusterInfo() *ClusterInfo { r := &ClusterInfo{ diff --git a/typedapi/types/clusteringest.go b/typedapi/types/clusteringest.go old mode 100755 new mode 100644 index 892d639258..cc604cdc83 --- a/typedapi/types/clusteringest.go +++ b/typedapi/types/clusteringest.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterIngest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L151-L154 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L151-L154 type ClusterIngest struct { NumberOfPipelines int `json:"number_of_pipelines"` ProcessorStats map[string]ClusterProcessor `json:"processor_stats"` } +func (s *ClusterIngest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "number_of_pipelines": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfPipelines = value + case float64: + f := int(v) + s.NumberOfPipelines = f + } + + case "processor_stats": + if s.ProcessorStats == nil { + s.ProcessorStats = make(map[string]ClusterProcessor, 0) + } + if err := dec.Decode(&s.ProcessorStats); err != nil { + return err + } + + } + } + return nil +} + // NewClusterIngest returns a ClusterIngest. func NewClusterIngest() *ClusterIngest { r := &ClusterIngest{ diff --git a/typedapi/types/clusterjvm.go b/typedapi/types/clusterjvm.go old mode 100755 new mode 100644 index 52d2b2c91b..7d62ac3ec5 --- a/typedapi/types/clusterjvm.go +++ b/typedapi/types/clusterjvm.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterJvm type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L156-L161 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L156-L161 type ClusterJvm struct { MaxUptimeInMillis int64 `json:"max_uptime_in_millis"` Mem ClusterJvmMemory `json:"mem"` @@ -30,6 +40,56 @@ type ClusterJvm struct { Versions []ClusterJvmVersion `json:"versions"` } +func (s *ClusterJvm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_uptime_in_millis": + if err := dec.Decode(&s.MaxUptimeInMillis); err != nil { + return err + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "threads": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Threads = value + case float64: + f := int64(v) + s.Threads = f + } + + case "versions": + if err := dec.Decode(&s.Versions); err != nil { + return err + } + + } + } + return nil +} + // NewClusterJvm returns a ClusterJvm. 
func NewClusterJvm() *ClusterJvm { r := &ClusterJvm{} diff --git a/typedapi/types/clusterjvmmemory.go b/typedapi/types/clusterjvmmemory.go old mode 100755 new mode 100644 index da00a6d69d..54be99b844 --- a/typedapi/types/clusterjvmmemory.go +++ b/typedapi/types/clusterjvmmemory.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterJvmMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L163-L166 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L163-L166 type ClusterJvmMemory struct { HeapMaxInBytes int64 `json:"heap_max_in_bytes"` HeapUsedInBytes int64 `json:"heap_used_in_bytes"` } +func (s *ClusterJvmMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "heap_max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapMaxInBytes = value + case float64: + f := int64(v) + s.HeapMaxInBytes = f + } + + case "heap_used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapUsedInBytes = value + case float64: + f := int64(v) + s.HeapUsedInBytes = f + } + + } + } + return nil +} + // NewClusterJvmMemory returns a ClusterJvmMemory. func NewClusterJvmMemory() *ClusterJvmMemory { r := &ClusterJvmMemory{} diff --git a/typedapi/types/clusterjvmversion.go b/typedapi/types/clusterjvmversion.go old mode 100755 new mode 100644 index f4d3a19a67..6f0b916020 --- a/typedapi/types/clusterjvmversion.go +++ b/typedapi/types/clusterjvmversion.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterJvmVersion type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L168-L176 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L168-L176 type ClusterJvmVersion struct { BundledJdk bool `json:"bundled_jdk"` Count int `json:"count"` @@ -33,6 +43,96 @@ type ClusterJvmVersion struct { VmVersion string `json:"vm_version"` } +func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bundled_jdk": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.BundledJdk = value + case bool: + s.BundledJdk = v + } + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "using_bundled_jdk": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UsingBundledJdk = value + case bool: + s.UsingBundledJdk = v + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "vm_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VmName = o + + case "vm_vendor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VmVendor = o + + case "vm_version": + if err := dec.Decode(&s.VmVersion); err != nil { + return err + } + + } + } + return nil +} + // NewClusterJvmVersion returns a ClusterJvmVersion. func NewClusterJvmVersion() *ClusterJvmVersion { r := &ClusterJvmVersion{} diff --git a/typedapi/types/clusternetworktypes.go b/typedapi/types/clusternetworktypes.go old mode 100755 new mode 100644 index 9852501167..20799cb433 --- a/typedapi/types/clusternetworktypes.go +++ b/typedapi/types/clusternetworktypes.go @@ -16,18 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ClusterNetworkTypes type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L178-L181 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L178-L181 type ClusterNetworkTypes struct { HttpTypes map[string]int `json:"http_types"` TransportTypes map[string]int `json:"transport_types"` } +func (s *ClusterNetworkTypes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "http_types": + if s.HttpTypes == nil { + s.HttpTypes = make(map[string]int, 0) + } + if err := dec.Decode(&s.HttpTypes); err != nil { + return err + } + + case "transport_types": + if s.TransportTypes == nil { + s.TransportTypes = make(map[string]int, 0) + } + if err := dec.Decode(&s.TransportTypes); err != nil { + return err + } + + } + } + return nil +} + // NewClusterNetworkTypes returns a ClusterNetworkTypes. func NewClusterNetworkTypes() *ClusterNetworkTypes { r := &ClusterNetworkTypes{ diff --git a/typedapi/types/clusternode.go b/typedapi/types/clusternode.go old mode 100755 new mode 100644 index df0ba28cce..97104fec30 --- a/typedapi/types/clusternode.go +++ b/typedapi/types/clusternode.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ClusterNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/ClusterNode.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/ClusterNode.ts#L22-L24 type ClusterNode struct { Name string `json:"name"` } +func (s *ClusterNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewClusterNode returns a ClusterNode. func NewClusterNode() *ClusterNode { r := &ClusterNode{} diff --git a/typedapi/types/clusternodecount.go b/typedapi/types/clusternodecount.go old mode 100755 new mode 100644 index 7522b73b81..9d987caba1 --- a/typedapi/types/clusternodecount.go +++ b/typedapi/types/clusternodecount.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterNodeCount type. 
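ClusterNetworkTypes follows the map-allocating pattern: both `http_types` and `transport_types` are created by the decoder when absent, so a zero-value struct can be unmarshaled directly. A brief sketch with an illustrative transport name and count:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var n types.ClusterNetworkTypes
	// Illustrative fragment of the network_types section of a cluster stats response.
	data := []byte(`{"http_types":{"security4":3},"transport_types":{"security4":3}}`)
	if err := json.Unmarshal(data, &n); err != nil {
		panic(err)
	}
	fmt.Println(n.HttpTypes["security4"], n.TransportTypes["security4"]) // 3 3
}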
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L183-L199 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L183-L199 type ClusterNodeCount struct { CoordinatingOnly int `json:"coordinating_only"` Data int `json:"data"` @@ -40,6 +50,250 @@ type ClusterNodeCount struct { VotingOnly int `json:"voting_only"` } +func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "coordinating_only": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CoordinatingOnly = value + case float64: + f := int(v) + s.CoordinatingOnly = f + } + + case "data": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Data = value + case float64: + f := int(v) + s.Data = f + } + + case "data_cold": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DataCold = value + case float64: + f := int(v) + s.DataCold = f + } + + case "data_content": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DataContent = value + case float64: + f := int(v) + s.DataContent = f + } + + case "data_frozen": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DataFrozen = &value + case float64: + f := int(v) + s.DataFrozen = &f + } + + case "data_hot": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DataHot = value + case float64: + f := int(v) + s.DataHot = f + } + + case "data_warm": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DataWarm = value + case float64: + f := int(v) + s.DataWarm = f + } + + case "ingest": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Ingest = value + case float64: + f := int(v) + s.Ingest = f + } + + case "master": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Master = value + case float64: + f := int(v) + s.Master = f + } + + case "ml": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Ml = value + case float64: + f := int(v) + s.Ml = f + } + + case "remote_cluster_client": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RemoteClusterClient = value + case float64: + f := int(v) + s.RemoteClusterClient = f + } + + case "total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) 
{ + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Total = value + case float64: + f := int(v) + s.Total = f + } + + case "transform": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Transform = value + case float64: + f := int(v) + s.Transform = f + } + + case "voting_only": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.VotingOnly = value + case float64: + f := int(v) + s.VotingOnly = f + } + + } + } + return nil +} + // NewClusterNodeCount returns a ClusterNodeCount. func NewClusterNodeCount() *ClusterNodeCount { r := &ClusterNodeCount{} diff --git a/typedapi/types/clusternodes.go b/typedapi/types/clusternodes.go old mode 100755 new mode 100644 index 34499c3cec..2a0cf82ca1 --- a/typedapi/types/clusternodes.go +++ b/typedapi/types/clusternodes.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ClusterNodes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L201-L228 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L201-L228 type ClusterNodes struct { // Count Contains counts for nodes selected by the request’s node filters. Count ClusterNodeCount `json:"count"` @@ -51,6 +59,89 @@ type ClusterNodes struct { Versions []string `json:"versions"` } +func (s *ClusterNodes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + if err := dec.Decode(&s.Count); err != nil { + return err + } + + case "discovery_types": + if s.DiscoveryTypes == nil { + s.DiscoveryTypes = make(map[string]int, 0) + } + if err := dec.Decode(&s.DiscoveryTypes); err != nil { + return err + } + + case "fs": + if err := dec.Decode(&s.Fs); err != nil { + return err + } + + case "indexing_pressure": + if err := dec.Decode(&s.IndexingPressure); err != nil { + return err + } + + case "ingest": + if err := dec.Decode(&s.Ingest); err != nil { + return err + } + + case "jvm": + if err := dec.Decode(&s.Jvm); err != nil { + return err + } + + case "network_types": + if err := dec.Decode(&s.NetworkTypes); err != nil { + return err + } + + case "os": + if err := dec.Decode(&s.Os); err != nil { + return err + } + + case "packaging_types": + if err := dec.Decode(&s.PackagingTypes); err != nil { + return err + } + + case "plugins": + if err := dec.Decode(&s.Plugins); err != nil { + return err + } + + case "process": + if err := dec.Decode(&s.Process); err != nil { + return err + } + + case "versions": + if err := dec.Decode(&s.Versions); err != nil { + return err + } + + } + } + return nil +} + // NewClusterNodes returns a ClusterNodes. 
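The ClusterNodes decoder mostly delegates to the nested types added elsewhere in this diff (ClusterNodeCount, ClusterNetworkTypes, and so on), and only the keys present in the JSON are populated. A trimmed, illustrative fragment of the `nodes` section of a cluster-stats response, as a rough sketch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var nodes types.ClusterNodes
	// Illustrative, heavily trimmed payload; omitted sections are left at their zero values.
	data := []byte(`{
		"count": {"total": 3, "data": 2, "master": 3, "coordinating_only": 0},
		"versions": ["8.8.0"],
		"discovery_types": {"multi-node": 1}
	}`)
	if err := json.Unmarshal(data, &nodes); err != nil {
		panic(err)
	}
	fmt.Println(nodes.Count.Total, nodes.Versions, nodes.DiscoveryTypes["multi-node"]) // 3 [8.8.0] 1
}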
func NewClusterNodes() *ClusterNodes { r := &ClusterNodes{ diff --git a/typedapi/types/clusteroperatingsystem.go b/typedapi/types/clusteroperatingsystem.go old mode 100755 new mode 100644 index daf3c544e9..a575305a76 --- a/typedapi/types/clusteroperatingsystem.go +++ b/typedapi/types/clusteroperatingsystem.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterOperatingSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L235-L242 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L235-L242 type ClusterOperatingSystem struct { AllocatedProcessors int `json:"allocated_processors"` Architectures []ClusterOperatingSystemArchitecture `json:"architectures,omitempty"` @@ -32,6 +42,78 @@ type ClusterOperatingSystem struct { PrettyNames []ClusterOperatingSystemPrettyName `json:"pretty_names"` } +func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocated_processors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AllocatedProcessors = value + case float64: + f := int(v) + s.AllocatedProcessors = f + } + + case "architectures": + if err := dec.Decode(&s.Architectures); err != nil { + return err + } + + case "available_processors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AvailableProcessors = value + case float64: + f := int(v) + s.AvailableProcessors = f + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "names": + if err := dec.Decode(&s.Names); err != nil { + return err + } + + case "pretty_names": + if err := dec.Decode(&s.PrettyNames); err != nil { + return err + } + + } + } + return nil +} + // NewClusterOperatingSystem returns a ClusterOperatingSystem. func NewClusterOperatingSystem() *ClusterOperatingSystem { r := &ClusterOperatingSystem{} diff --git a/typedapi/types/clusteroperatingsystemarchitecture.go b/typedapi/types/clusteroperatingsystemarchitecture.go old mode 100755 new mode 100644 index eddb888159..e02db47c1f --- a/typedapi/types/clusteroperatingsystemarchitecture.go +++ b/typedapi/types/clusteroperatingsystemarchitecture.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterOperatingSystemArchitecture type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L230-L233 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L230-L233 type ClusterOperatingSystemArchitecture struct { Arch string `json:"arch"` Count int `json:"count"` } +func (s *ClusterOperatingSystemArchitecture) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "arch": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Arch = o + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + } + } + return nil +} + // NewClusterOperatingSystemArchitecture returns a ClusterOperatingSystemArchitecture. func NewClusterOperatingSystemArchitecture() *ClusterOperatingSystemArchitecture { r := &ClusterOperatingSystemArchitecture{} diff --git a/typedapi/types/clusteroperatingsystemname.go b/typedapi/types/clusteroperatingsystemname.go old mode 100755 new mode 100644 index 5aec67afe0..2aef9497e4 --- a/typedapi/types/clusteroperatingsystemname.go +++ b/typedapi/types/clusteroperatingsystemname.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterOperatingSystemName type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L244-L247 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L244-L247 type ClusterOperatingSystemName struct { Count int `json:"count"` Name string `json:"name"` } +func (s *ClusterOperatingSystemName) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewClusterOperatingSystemName returns a ClusterOperatingSystemName. func NewClusterOperatingSystemName() *ClusterOperatingSystemName { r := &ClusterOperatingSystemName{} diff --git a/typedapi/types/clusteroperatingsystemprettyname.go b/typedapi/types/clusteroperatingsystemprettyname.go old mode 100755 new mode 100644 index fb01e495a7..597ddefa35 --- a/typedapi/types/clusteroperatingsystemprettyname.go +++ b/typedapi/types/clusteroperatingsystemprettyname.go @@ -16,18 +16,69 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterOperatingSystemPrettyName type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L249-L252 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L249-L252 type ClusterOperatingSystemPrettyName struct { Count int `json:"count"` PrettyName string `json:"pretty_name"` } +func (s *ClusterOperatingSystemPrettyName) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "pretty_name": + if err := dec.Decode(&s.PrettyName); err != nil { + return err + } + + } + } + return nil +} + // NewClusterOperatingSystemPrettyName returns a ClusterOperatingSystemPrettyName. func NewClusterOperatingSystemPrettyName() *ClusterOperatingSystemPrettyName { r := &ClusterOperatingSystemPrettyName{} diff --git a/typedapi/types/clusterpressurememory.go b/typedapi/types/clusterpressurememory.go old mode 100755 new mode 100644 index 0607701711..9df46597ed --- a/typedapi/types/clusterpressurememory.go +++ b/typedapi/types/clusterpressurememory.go @@ -16,19 +16,74 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterPressureMemory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L303-L307 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L303-L307 type ClusterPressureMemory struct { Current IndexingPressureMemorySummary `json:"current"` LimitInBytes int64 `json:"limit_in_bytes"` Total IndexingPressureMemorySummary `json:"total"` } +func (s *ClusterPressureMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current": + if err := dec.Decode(&s.Current); err != nil { + return err + } + + case "limit_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LimitInBytes = value + case float64: + f := int64(v) + s.LimitInBytes = f + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + } + } + return nil +} + // NewClusterPressureMemory returns a ClusterPressureMemory. func NewClusterPressureMemory() *ClusterPressureMemory { r := &ClusterPressureMemory{} diff --git a/typedapi/types/clusterprocess.go b/typedapi/types/clusterprocess.go old mode 100755 new mode 100644 index 1b65d57f68..063c0f0502 --- a/typedapi/types/clusterprocess.go +++ b/typedapi/types/clusterprocess.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ClusterProcess type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L254-L257 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L254-L257 type ClusterProcess struct { Cpu ClusterProcessCpu `json:"cpu"` OpenFileDescriptors ClusterProcessOpenFileDescriptors `json:"open_file_descriptors"` diff --git a/typedapi/types/clusterprocesscpu.go b/typedapi/types/clusterprocesscpu.go old mode 100755 new mode 100644 index 2cc631ac5a..898aaa023c --- a/typedapi/types/clusterprocesscpu.go +++ b/typedapi/types/clusterprocesscpu.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterProcessCpu type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L259-L261 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L259-L261 type ClusterProcessCpu struct { Percent int `json:"percent"` } +func (s *ClusterProcessCpu) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Percent = value + case float64: + f := int(v) + s.Percent = f + } + + } + } + return nil +} + // NewClusterProcessCpu returns a ClusterProcessCpu. func NewClusterProcessCpu() *ClusterProcessCpu { r := &ClusterProcessCpu{} diff --git a/typedapi/types/clusterprocessopenfiledescriptors.go b/typedapi/types/clusterprocessopenfiledescriptors.go old mode 100755 new mode 100644 index caaad4955d..8de50e65d6 --- a/typedapi/types/clusterprocessopenfiledescriptors.go +++ b/typedapi/types/clusterprocessopenfiledescriptors.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterProcessOpenFileDescriptors type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L263-L267 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L263-L267 type ClusterProcessOpenFileDescriptors struct { Avg int64 `json:"avg"` Max int64 `json:"max"` Min int64 `json:"min"` } +func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Avg = value + case float64: + f := int64(v) + s.Avg = f + } + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Max = value + case float64: + f := int64(v) + s.Max = f + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Min = value + case float64: + f := int64(v) + s.Min = f + } + + } + } + return nil +} + // NewClusterProcessOpenFileDescriptors returns a ClusterProcessOpenFileDescriptors. 
func NewClusterProcessOpenFileDescriptors() *ClusterProcessOpenFileDescriptors { r := &ClusterProcessOpenFileDescriptors{} diff --git a/typedapi/types/clusterprocessor.go b/typedapi/types/clusterprocessor.go old mode 100755 new mode 100644 index c42b39f3b6..98c20c58e7 --- a/typedapi/types/clusterprocessor.go +++ b/typedapi/types/clusterprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L269-L275 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L269-L275 type ClusterProcessor struct { Count int64 `json:"count"` Current int64 `json:"current"` @@ -31,6 +41,81 @@ type ClusterProcessor struct { TimeInMillis int64 `json:"time_in_millis"` } +func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Current = value + case float64: + f := int64(v) + s.Current = f + } + + case "failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Failed = value + case float64: + f := int64(v) + s.Failed = f + } + + case "time": + if err := dec.Decode(&s.Time); err != nil { + return err + } + + case "time_in_millis": + if err := dec.Decode(&s.TimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewClusterProcessor returns a ClusterProcessor. func NewClusterProcessor() *ClusterProcessor { r := &ClusterProcessor{} diff --git a/typedapi/types/clusterremoteinfo.go b/typedapi/types/clusterremoteinfo.go old mode 100755 new mode 100644 index cbd61c7b11..7189fa5e2b --- a/typedapi/types/clusterremoteinfo.go +++ b/typedapi/types/clusterremoteinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // ClusterRemoteSniffInfo // ClusterRemoteProxyInfo // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L28-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L28-L29 type ClusterRemoteInfo interface{} diff --git a/typedapi/types/clusterremoteproxyinfo.go b/typedapi/types/clusterremoteproxyinfo.go old mode 100755 new mode 100644 index cb4feeb292..eefe752d83 --- a/typedapi/types/clusterremoteproxyinfo.go +++ b/typedapi/types/clusterremoteproxyinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterRemoteProxyInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L41-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L41-L50 type ClusterRemoteProxyInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` @@ -34,6 +44,112 @@ type ClusterRemoteProxyInfo struct { SkipUnavailable bool `json:"skip_unavailable"` } +func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "connected": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Connected = value + case bool: + s.Connected = v + } + + case "initial_connect_timeout": + if err := dec.Decode(&s.InitialConnectTimeout); err != nil { + return err + } + + case "max_proxy_socket_connections": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxProxySocketConnections = value + case float64: + f := int(v) + s.MaxProxySocketConnections = f + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "num_proxy_sockets_connected": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumProxySocketsConnected = value + case float64: + f := int(v) + s.NumProxySocketsConnected = f + } + + case "proxy_address": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ProxyAddress = o + + case "server_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ServerName 
= o + + case "skip_unavailable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SkipUnavailable = value + case bool: + s.SkipUnavailable = v + } + + } + } + return nil +} + // NewClusterRemoteProxyInfo returns a ClusterRemoteProxyInfo. func NewClusterRemoteProxyInfo() *ClusterRemoteProxyInfo { r := &ClusterRemoteProxyInfo{} diff --git a/typedapi/types/clusterremotesniffinfo.go b/typedapi/types/clusterremotesniffinfo.go old mode 100755 new mode 100644 index b6aabb1f6b..f711a4638a --- a/typedapi/types/clusterremotesniffinfo.go +++ b/typedapi/types/clusterremotesniffinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterRemoteSniffInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L31-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L31-L39 type ClusterRemoteSniffInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` @@ -33,6 +43,100 @@ type ClusterRemoteSniffInfo struct { SkipUnavailable bool `json:"skip_unavailable"` } +func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "connected": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Connected = value + case bool: + s.Connected = v + } + + case "initial_connect_timeout": + if err := dec.Decode(&s.InitialConnectTimeout); err != nil { + return err + } + + case "max_connections_per_cluster": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxConnectionsPerCluster = value + case float64: + f := int(v) + s.MaxConnectionsPerCluster = f + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "num_nodes_connected": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumNodesConnected = value + case float64: + f := int64(v) + s.NumNodesConnected = f + } + + case "seeds": + if err := dec.Decode(&s.Seeds); err != nil { + return err + } + + case "skip_unavailable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SkipUnavailable = value + case bool: + s.SkipUnavailable = v + } + + } + } + return nil +} + // NewClusterRemoteSniffInfo returns a ClusterRemoteSniffInfo. 
func NewClusterRemoteSniffInfo() *ClusterRemoteSniffInfo { r := &ClusterRemoteSniffInfo{} diff --git a/typedapi/types/clusterruntimefieldtypes.go b/typedapi/types/clusterruntimefieldtypes.go old mode 100755 new mode 100644 index 8250ad84bf..a96bc8367e --- a/typedapi/types/clusterruntimefieldtypes.go +++ b/typedapi/types/clusterruntimefieldtypes.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterRuntimeFieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L116-L131 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L116-L131 type ClusterRuntimeFieldTypes struct { CharsMax int `json:"chars_max"` CharsTotal int `json:"chars_total"` @@ -40,6 +50,228 @@ type ClusterRuntimeFieldTypes struct { SourceTotal int `json:"source_total"` } +func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "chars_max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CharsMax = value + case float64: + f := int(v) + s.CharsMax = f + } + + case "chars_total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CharsTotal = value + case float64: + f := int(v) + s.CharsTotal = f + } + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "doc_max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DocMax = value + case float64: + f := int(v) + s.DocMax = f + } + + case "doc_total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DocTotal = value + case float64: + f := int(v) + s.DocTotal = f + } + + case "index_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IndexCount = value + case float64: + f := int(v) + s.IndexCount = f + } + + case "lang": + if err := dec.Decode(&s.Lang); err != nil { + return err + } + + case "lines_max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LinesMax = value + case float64: + f := int(v) + s.LinesMax = f + } + + case "lines_total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LinesTotal = value + 
case float64: + f := int(v) + s.LinesTotal = f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "scriptless_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ScriptlessCount = value + case float64: + f := int(v) + s.ScriptlessCount = f + } + + case "shadowed_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShadowedCount = value + case float64: + f := int(v) + s.ShadowedCount = f + } + + case "source_max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SourceMax = value + case float64: + f := int(v) + s.SourceMax = f + } + + case "source_total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SourceTotal = value + case float64: + f := int(v) + s.SourceTotal = f + } + + } + } + return nil +} + // NewClusterRuntimeFieldTypes returns a ClusterRuntimeFieldTypes. func NewClusterRuntimeFieldTypes() *ClusterRuntimeFieldTypes { r := &ClusterRuntimeFieldTypes{} diff --git a/typedapi/types/clustershardmetrics.go b/typedapi/types/clustershardmetrics.go old mode 100755 new mode 100644 index 453109f073..96f5777bf1 --- a/typedapi/types/clustershardmetrics.go +++ b/typedapi/types/clustershardmetrics.go @@ -16,19 +16,97 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterShardMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L277-L281 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L277-L281 type ClusterShardMetrics struct { Avg Float64 `json:"avg"` Max Float64 `json:"max"` Min Float64 `json:"min"` } +func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Avg = f + case float64: + f := Float64(v) + s.Avg = f + } + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Max = f + case float64: + f := Float64(v) + s.Max = f + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Min = f + case float64: + f := Float64(v) + s.Min = f + } + + } + } + return nil +} + // NewClusterShardMetrics returns a ClusterShardMetrics. func NewClusterShardMetrics() *ClusterShardMetrics { r := &ClusterShardMetrics{} diff --git a/typedapi/types/clusterstatequeue.go b/typedapi/types/clusterstatequeue.go old mode 100755 new mode 100644 index f97f404dff..bb23a579db --- a/typedapi/types/clusterstatequeue.go +++ b/typedapi/types/clusterstatequeue.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterStateQueue type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L114-L118 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L114-L118 type ClusterStateQueue struct { Committed *int64 `json:"committed,omitempty"` Pending *int64 `json:"pending,omitempty"` Total *int64 `json:"total,omitempty"` } +func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "committed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Committed = &value + case float64: + f := int64(v) + s.Committed = &f + } + + case "pending": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Pending = &value + case float64: + f := int64(v) + s.Pending = &f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = &value + case float64: + f := int64(v) + s.Total = &f + } + + } + } + return nil +} + // NewClusterStateQueue returns a ClusterStateQueue. func NewClusterStateQueue() *ClusterStateQueue { r := &ClusterStateQueue{} diff --git a/typedapi/types/clusterstateupdate.go b/typedapi/types/clusterstateupdate.go old mode 100755 new mode 100644 index 3c885f7dac..06c816253b --- a/typedapi/types/clusterstateupdate.go +++ b/typedapi/types/clusterstateupdate.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterStateUpdate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L126-L142 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L126-L142 type ClusterStateUpdate struct { CommitTime Duration `json:"commit_time,omitempty"` CommitTimeMillis *int64 `json:"commit_time_millis,omitempty"` @@ -41,6 +51,111 @@ type ClusterStateUpdate struct { PublicationTimeMillis *int64 `json:"publication_time_millis,omitempty"` } +func (s *ClusterStateUpdate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "commit_time": + if err := dec.Decode(&s.CommitTime); err != nil { + return err + } + + case "commit_time_millis": + if err := dec.Decode(&s.CommitTimeMillis); err != nil { + return err + } + + case "completion_time": + if err := dec.Decode(&s.CompletionTime); err != nil { + return err + } + + case "completion_time_millis": + if err := dec.Decode(&s.CompletionTimeMillis); err != nil { + return err + } + + case "computation_time": + if err := dec.Decode(&s.ComputationTime); err != nil { + return err + } + + case "computation_time_millis": + if err := dec.Decode(&s.ComputationTimeMillis); err != nil { + return err + } + + case "context_construction_time": + if err := dec.Decode(&s.ContextConstructionTime); err != nil { + return err + } + + case "context_construction_time_millis": + if err := dec.Decode(&s.ContextConstructionTimeMillis); err != nil { + return err + } + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "master_apply_time": + if err := dec.Decode(&s.MasterApplyTime); err != nil { + return err + } + + case "master_apply_time_millis": + if err := dec.Decode(&s.MasterApplyTimeMillis); err != nil { + return err + } + + case "notification_time": + if err := dec.Decode(&s.NotificationTime); err != nil { + return err + } + + case "notification_time_millis": + if err := dec.Decode(&s.NotificationTimeMillis); err != nil { + return err + } + + case "publication_time": + if err := dec.Decode(&s.PublicationTime); err != nil { + return err + } + + case "publication_time_millis": + if err := dec.Decode(&s.PublicationTimeMillis); err != nil { + return err + } + + } + } + return nil +} + // NewClusterStateUpdate returns a ClusterStateUpdate. func NewClusterStateUpdate() *ClusterStateUpdate { r := &ClusterStateUpdate{} diff --git a/typedapi/types/clusterstatistics.go b/typedapi/types/clusterstatistics.go old mode 100755 new mode 100644 index 7ff83603e0..7aa69051d6 --- a/typedapi/types/clusterstatistics.go +++ b/typedapi/types/clusterstatistics.go @@ -16,19 +16,97 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ClusterStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L27-L31 type ClusterStatistics struct { Skipped int `json:"skipped"` Successful int `json:"successful"` Total int `json:"total"` } +func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "skipped": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Skipped = value + case float64: + f := int(v) + s.Skipped = f + } + + case "successful": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Successful = value + case float64: + f := int(v) + s.Successful = f + } + + case "total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Total = value + case float64: + f := int(v) + s.Total = f + } + + } + } + return nil +} + // NewClusterStatistics returns a ClusterStatistics. func NewClusterStatistics() *ClusterStatistics { r := &ClusterStatistics{} diff --git a/typedapi/types/collector.go b/typedapi/types/collector.go old mode 100755 new mode 100644 index 8fb3eac979..2eb83ed942 --- a/typedapi/types/collector.go +++ b/typedapi/types/collector.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Collector type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L86-L91 type Collector struct { Children []Collector `json:"children,omitempty"` Name string `json:"name"` @@ -30,6 +38,52 @@ type Collector struct { TimeInNanos int64 `json:"time_in_nanos"` } +func (s *Collector) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "children": + if err := dec.Decode(&s.Children); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = o + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = o + + case "time_in_nanos": + if err := dec.Decode(&s.TimeInNanos); err != nil { + return err + } + + } + } + return nil +} + // NewCollector returns a Collector. 
func NewCollector() *Collector { r := &Collector{} diff --git a/typedapi/types/column.go b/typedapi/types/column.go old mode 100755 new mode 100644 index 349e31131f..7110180e25 --- a/typedapi/types/column.go +++ b/typedapi/types/column.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Column type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/types.ts#L23-L26 type Column struct { Name string `json:"name"` Type string `json:"type"` } +func (s *Column) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewColumn returns a Column. func NewColumn() *Column { r := &Column{} diff --git a/typedapi/types/combinedfieldsquery.go b/typedapi/types/combinedfieldsquery.go old mode 100755 new mode 100644 index 2d1cd66c2b..57a047740a --- a/typedapi/types/combinedfieldsquery.go +++ b/typedapi/types/combinedfieldsquery.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/combinedfieldsoperator" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/combinedfieldszeroterms" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // CombinedFieldsQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L181-L195 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L181-L195 type CombinedFieldsQuery struct { AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -39,6 +47,92 @@ type CombinedFieldsQuery struct { ZeroTermsQuery *combinedfieldszeroterms.CombinedFieldsZeroTerms `json:"zero_terms_query,omitempty"` } +func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auto_generate_synonyms_phrase_query": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AutoGenerateSynonymsPhraseQuery = &value + case bool: + s.AutoGenerateSynonymsPhraseQuery = &v + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "operator": + if err := dec.Decode(&s.Operator); err != nil { + return err + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "zero_terms_query": + if err := dec.Decode(&s.ZeroTermsQuery); err != nil { + return err + } + + } + } + return nil +} + // NewCombinedFieldsQuery returns a CombinedFieldsQuery. func NewCombinedFieldsQuery() *CombinedFieldsQuery { r := &CombinedFieldsQuery{} diff --git a/typedapi/types/command.go b/typedapi/types/command.go old mode 100755 new mode 100644 index a382ff3956..927e723638 --- a/typedapi/types/command.go +++ b/typedapi/types/command.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Command type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L22-L43 type Command struct { // AllocateEmptyPrimary Allocate an empty primary shard to a node. Accepts the index and shard for // index name and shard number, and node to allocate the shard to. 
Using this diff --git a/typedapi/types/commandallocateprimaryaction.go b/typedapi/types/commandallocateprimaryaction.go old mode 100755 new mode 100644 index ca8a7da5d4..df3587ca73 --- a/typedapi/types/commandallocateprimaryaction.go +++ b/typedapi/types/commandallocateprimaryaction.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CommandAllocatePrimaryAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L78-L84 type CommandAllocatePrimaryAction struct { // AcceptDataLoss If a node which has a copy of the data rejoins the cluster later on, that // data will be deleted. To ensure that these implications are well-understood, @@ -33,6 +43,69 @@ type CommandAllocatePrimaryAction struct { Shard int `json:"shard"` } +func (s *CommandAllocatePrimaryAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "accept_data_loss": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AcceptDataLoss = value + case bool: + s.AcceptDataLoss = v + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + } + } + return nil +} + // NewCommandAllocatePrimaryAction returns a CommandAllocatePrimaryAction. func NewCommandAllocatePrimaryAction() *CommandAllocatePrimaryAction { r := &CommandAllocatePrimaryAction{} diff --git a/typedapi/types/commandallocatereplicaaction.go b/typedapi/types/commandallocatereplicaaction.go old mode 100755 new mode 100644 index 6f270c399b..0154b440a9 --- a/typedapi/types/commandallocatereplicaaction.go +++ b/typedapi/types/commandallocatereplicaaction.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CommandAllocateReplicaAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L69-L76 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L69-L76 type CommandAllocateReplicaAction struct { Index string `json:"index"` Node string `json:"node"` Shard int `json:"shard"` } +func (s *CommandAllocateReplicaAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + } + } + return nil +} + // NewCommandAllocateReplicaAction returns a CommandAllocateReplicaAction. func NewCommandAllocateReplicaAction() *CommandAllocateReplicaAction { r := &CommandAllocateReplicaAction{} diff --git a/typedapi/types/commandcancelaction.go b/typedapi/types/commandcancelaction.go old mode 100755 new mode 100644 index 3759825b9a..d586d01b5e --- a/typedapi/types/commandcancelaction.go +++ b/typedapi/types/commandcancelaction.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CommandCancelAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L45-L50 type CommandCancelAction struct { AllowPrimary *bool `json:"allow_primary,omitempty"` Index string `json:"index"` @@ -30,6 +40,69 @@ type CommandCancelAction struct { Shard int `json:"shard"` } +func (s *CommandCancelAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_primary": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowPrimary = &value + case bool: + s.AllowPrimary = &v + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + } + } + return nil +} + // NewCommandCancelAction returns a CommandCancelAction. 
func NewCommandCancelAction() *CommandCancelAction { r := &CommandCancelAction{} diff --git a/typedapi/types/commandmoveaction.go b/typedapi/types/commandmoveaction.go old mode 100755 new mode 100644 index 0ffb65a311..07bcd04cad --- a/typedapi/types/commandmoveaction.go +++ b/typedapi/types/commandmoveaction.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CommandMoveAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L60-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L60-L67 type CommandMoveAction struct { // FromNode The node to move the shard from FromNode string `json:"from_node"` @@ -32,6 +42,63 @@ type CommandMoveAction struct { ToNode string `json:"to_node"` } +func (s *CommandMoveAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "from_node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FromNode = o + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + case "to_node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ToNode = o + + } + } + return nil +} + // NewCommandMoveAction returns a CommandMoveAction. func NewCommandMoveAction() *CommandMoveAction { r := &CommandMoveAction{} diff --git a/typedapi/types/commongramstokenfilter.go b/typedapi/types/commongramstokenfilter.go old mode 100755 new mode 100644 index 4f30668e9e..e6fd7fd420 --- a/typedapi/types/commongramstokenfilter.go +++ b/typedapi/types/commongramstokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CommonGramsTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L172-L178 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L173-L179 type CommonGramsTokenFilter struct { CommonWords []string `json:"common_words,omitempty"` CommonWordsPath *string `json:"common_words_path,omitempty"` @@ -32,6 +42,77 @@ type CommonGramsTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "common_words": + if err := dec.Decode(&s.CommonWords); err != nil { + return err + } + + case "common_words_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CommonWordsPath = &o + + case "ignore_case": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreCase = &value + case bool: + s.IgnoreCase = &v + } + + case "query_mode": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.QueryMode = &value + case bool: + s.QueryMode = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewCommonGramsTokenFilter returns a CommonGramsTokenFilter. func NewCommonGramsTokenFilter() *CommonGramsTokenFilter { r := &CommonGramsTokenFilter{} diff --git a/typedapi/types/commontermsquery.go b/typedapi/types/commontermsquery.go old mode 100755 new mode 100644 index 912ce38ddb..9c20954116 --- a/typedapi/types/commontermsquery.go +++ b/typedapi/types/commontermsquery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/operator" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // CommonTermsQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L33-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L33-L43 type CommonTermsQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -38,6 +46,102 @@ type CommonTermsQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Query) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "cutoff_frequency": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.CutoffFrequency = &f + case float64: + f := Float64(v) + s.CutoffFrequency = &f + } + + case "high_freq_operator": + if err := dec.Decode(&s.HighFreqOperator); err != nil { + return err + } + + case "low_freq_operator": + if err := dec.Decode(&s.LowFreqOperator); err != nil { + return err + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewCommonTermsQuery returns a CommonTermsQuery. func NewCommonTermsQuery() *CommonTermsQuery { r := &CommonTermsQuery{} diff --git a/typedapi/types/compactnodeinfo.go b/typedapi/types/compactnodeinfo.go old mode 100755 new mode 100644 index f6e898832d..a180462053 --- a/typedapi/types/compactnodeinfo.go +++ b/typedapi/types/compactnodeinfo.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // CompactNodeInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L27-L29 type CompactNodeInfo struct { Name string `json:"name"` } +func (s *CompactNodeInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewCompactNodeInfo returns a CompactNodeInfo. func NewCompactNodeInfo() *CompactNodeInfo { r := &CompactNodeInfo{} diff --git a/typedapi/types/completioncontext.go b/typedapi/types/completioncontext.go old mode 100755 new mode 100644 index 702fc6e22b..6f8554e58f --- a/typedapi/types/completioncontext.go +++ b/typedapi/types/completioncontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,12 +25,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // CompletionContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L155-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L159-L166 type CompletionContext struct { Boost *Float64 `json:"boost,omitempty"` Context Context `json:"context"` @@ -40,6 +42,12 @@ type CompletionContext struct { } func (s *CompletionContext) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Context) + return err + } + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -54,11 +62,23 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "context": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -88,8 +108,17 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { } case "prefix": - if err := dec.Decode(&s.Prefix); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Prefix = &value + case bool: + s.Prefix = &v } } diff --git a/typedapi/types/completionproperty.go b/typedapi/types/completionproperty.go old mode 100755 new mode 100644 index e695131d86..9e04467944 --- a/typedapi/types/completionproperty.go +++ b/typedapi/types/completionproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // CompletionProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L27-L35 type CompletionProperty struct { Analyzer *string `json:"analyzer,omitempty"` Contexts []SuggestContext `json:"contexts,omitempty"` @@ -54,6 +56,7 @@ type CompletionProperty struct { } func (s *CompletionProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -68,9 +71,12 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { switch t { case "analyzer": - if err := dec.Decode(&s.Analyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Analyzer = &o case "contexts": if err := dec.Decode(&s.Contexts); err != nil { @@ -78,13 +84,33 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -93,6 +119,9 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -380,38 +409,84 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "max_input_length": - if err := dec.Decode(&s.MaxInputLength); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxInputLength = &value + case float64: + f := int(v) + s.MaxInputLength = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "preserve_position_increments": - if err := dec.Decode(&s.PreservePositionIncrements); err != nil { - return err + var tmp interface{} + 
dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreservePositionIncrements = &value + case bool: + s.PreservePositionIncrements = &v } case "preserve_separators": - if err := dec.Decode(&s.PreserveSeparators); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveSeparators = &value + case bool: + s.PreserveSeparators = &v } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -699,25 +774,40 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "search_analyzer": - if err := dec.Decode(&s.SearchAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchAnalyzer = &o case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/completionstats.go b/typedapi/types/completionstats.go old mode 100755 new mode 100644 index 1e293330fe..0e11f2d49b --- a/typedapi/types/completionstats.go +++ b/typedapi/types/completionstats.go @@ -16,19 +16,77 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CompletionStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L53-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L53-L57 type CompletionStats struct { Fields map[string]FieldSizeUsage `json:"fields,omitempty"` Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` } +func (s *CompletionStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]FieldSizeUsage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + } + } + return nil +} + // NewCompletionStats returns a CompletionStats. func NewCompletionStats() *CompletionStats { r := &CompletionStats{ diff --git a/typedapi/types/completionsuggest.go b/typedapi/types/completionsuggest.go old mode 100755 new mode 100644 index a66e3f380f..865c8747d3 --- a/typedapi/types/completionsuggest.go +++ b/typedapi/types/completionsuggest.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CompletionSuggest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L48-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L48-L55 type CompletionSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -30,6 +40,82 @@ type CompletionSuggest struct { Text string `json:"text"` } +func (s *CompletionSuggest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Length = value + case float64: + f := int(v) + s.Length = f + } + + case "offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Offset = value + case float64: + f := int(v) + s.Offset = f + } + + case "options": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewCompletionSuggestOption() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Options = append(s.Options, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Options); err != nil { + return err + } + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = o + + } + } + return nil +} + // NewCompletionSuggest returns a CompletionSuggest. func NewCompletionSuggest() *CompletionSuggest { r := &CompletionSuggest{} diff --git a/typedapi/types/completionsuggester.go b/typedapi/types/completionsuggester.go old mode 100755 new mode 100644 index 5d49a72bcb..2eba6bba98 --- a/typedapi/types/completionsuggester.go +++ b/typedapi/types/completionsuggester.go @@ -16,24 +16,131 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CompletionSuggester type. 
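// The CompletionSuggest decoder above normalizes "options": a single JSON object
// and an array of objects both end up in the Options slice, mirroring the
// "copy_to" handling added for CompletionProperty earlier in this diff. A small
// sketch (not generated code), assuming the same types package:
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	single := []byte(`{"text":"sugg","offset":0,"length":4,"options":{"text":"suggestion"}}`)
	array := []byte(`{"text":"sugg","offset":0,"length":4,"options":[{"text":"suggestion"}]}`)

	var a, b types.CompletionSuggest
	if err := json.Unmarshal(single, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(array, &b); err != nil {
		panic(err)
	}

	// Both payloads yield exactly one decoded option.
	fmt.Println(len(a.Options), len(b.Options)) // 1 1
}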
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L130-L136 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L130-L135 type CompletionSuggester struct { Analyzer *string `json:"analyzer,omitempty"` Contexts map[string][]CompletionContext `json:"contexts,omitempty"` Field string `json:"field"` Fuzzy *SuggestFuzziness `json:"fuzzy,omitempty"` - Prefix *string `json:"prefix,omitempty"` - Regex *string `json:"regex,omitempty"` + Regex *RegexOptions `json:"regex,omitempty"` Size *int `json:"size,omitempty"` SkipDuplicates *bool `json:"skip_duplicates,omitempty"` } +func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "contexts": + if s.Contexts == nil { + s.Contexts = make(map[string][]CompletionContext, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := NewCompletionContext() + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Contexts[key] = append(s.Contexts[key], *o) + default: + o := []CompletionContext{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Contexts[key] = o + } + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "fuzzy": + if err := dec.Decode(&s.Fuzzy); err != nil { + return err + } + + case "regex": + if err := dec.Decode(&s.Regex); err != nil { + return err + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "skip_duplicates": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SkipDuplicates = &value + case bool: + s.SkipDuplicates = &v + } + + } + } + return nil +} + // NewCompletionSuggester returns a CompletionSuggester. func NewCompletionSuggester() *CompletionSuggester { r := &CompletionSuggester{ diff --git a/typedapi/types/completionsuggestoption.go b/typedapi/types/completionsuggestoption.go old mode 100755 new mode 100644 index 742a62ed63..392c422f3e --- a/typedapi/types/completionsuggestoption.go +++ b/typedapi/types/completionsuggestoption.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // CompletionSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L73-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L73-L84 type CompletionSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Contexts map[string][]Context `json:"contexts,omitempty"` @@ -40,6 +46,119 @@ type CompletionSuggestOption struct { Text string `json:"text"` } +func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collate_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CollateMatch = &value + case bool: + s.CollateMatch = &v + } + + case "contexts": + if s.Contexts == nil { + s.Contexts = make(map[string][]Context, 0) + } + if err := dec.Decode(&s.Contexts); err != nil { + return err + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Id_ = &o + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "_routing": + if err := dec.Decode(&s.Routing_); err != nil { + return err + } + + case "score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score = &f + case float64: + f := Float64(v) + s.Score = &f + } + + case "_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score_ = &f + case float64: + f := Float64(v) + s.Score_ = &f + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = o + + } + } + return nil +} + // NewCompletionSuggestOption returns a CompletionSuggestOption. func NewCompletionSuggestOption() *CompletionSuggestOption { r := &CompletionSuggestOption{ diff --git a/typedapi/types/componenttemplatenode.go b/typedapi/types/componenttemplatenode.go old mode 100755 new mode 100644 index 623ccb1ca6..5573c0dfa7 --- a/typedapi/types/componenttemplatenode.go +++ b/typedapi/types/componenttemplatenode.go @@ -16,21 +16,60 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ComponentTemplateNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/_types/ComponentTemplate.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/_types/ComponentTemplate.ts#L31-L36 type ComponentTemplateNode struct { - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` - Template ComponentTemplateSummary `json:"template"` - Version *int64 `json:"version,omitempty"` + Meta_ Metadata `json:"_meta,omitempty"` + Template ComponentTemplateSummary `json:"template"` + Version *int64 `json:"version,omitempty"` +} + +func (s *ComponentTemplateNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return err + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil } // NewComponentTemplateNode returns a ComponentTemplateNode. diff --git a/typedapi/types/componenttemplatesummary.go b/typedapi/types/componenttemplatesummary.go old mode 100755 new mode 100644 index 2376147374..6aaa448cad --- a/typedapi/types/componenttemplatesummary.go +++ b/typedapi/types/componenttemplatesummary.go @@ -16,25 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ComponentTemplateSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/_types/ComponentTemplate.ts#L38-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/_types/ComponentTemplate.ts#L38-L45 type ComponentTemplateSummary struct { Aliases map[string]AliasDefinition `json:"aliases,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ Metadata `json:"_meta,omitempty"` Settings map[string]IndexSettings `json:"settings,omitempty"` Version *int64 `json:"version,omitempty"` } +func (s *ComponentTemplateSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if s.Aliases == nil { + s.Aliases = make(map[string]AliasDefinition, 0) + } + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return err + } + + case "settings": + if s.Settings == nil { + s.Settings = make(map[string]IndexSettings, 0) + } + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewComponentTemplateSummary returns a ComponentTemplateSummary. 
func NewComponentTemplateSummary() *ComponentTemplateSummary { r := &ComponentTemplateSummary{ diff --git a/typedapi/types/compositeaggregate.go b/typedapi/types/compositeaggregate.go old mode 100755 new mode 100644 index 12bb3e7d34..b5279c4ec7 --- a/typedapi/types/compositeaggregate.go +++ b/typedapi/types/compositeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,14 +30,15 @@ import ( // CompositeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L617-L622 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L618-L623 type CompositeAggregate struct { - AfterKey map[string]FieldValue `json:"after_key,omitempty"` - Buckets BucketsCompositeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + AfterKey CompositeAggregateKey `json:"after_key,omitempty"` + Buckets BucketsCompositeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *CompositeAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,15 +64,17 @@ func (s *CompositeAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]CompositeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []CompositeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/compositeaggregatekey.go b/typedapi/types/compositeaggregatekey.go old mode 100755 new mode 100644 index 402ba7285f..5755c5370d --- a/typedapi/types/compositeaggregatekey.go +++ b/typedapi/types/compositeaggregatekey.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CompositeAggregateKey type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L77-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L77-L77 type CompositeAggregateKey map[string]FieldValue diff --git a/typedapi/types/compositeaggregation.go b/typedapi/types/compositeaggregation.go old mode 100755 new mode 100644 index fc146eba88..a7c434512d --- a/typedapi/types/compositeaggregation.go +++ b/typedapi/types/compositeaggregation.go @@ -16,25 +16,90 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // CompositeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L79-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L79-L84 type CompositeAggregation struct { - After map[string]FieldValue `json:"after,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + After CompositeAggregateKey `json:"after,omitempty"` + Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Size *int `json:"size,omitempty"` Sources []map[string]CompositeAggregationSource `json:"sources,omitempty"` } +func (s *CompositeAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "after": + if err := dec.Decode(&s.After); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sources": + if err := dec.Decode(&s.Sources); err != nil { + return err + } + + } + } + return nil +} + // NewCompositeAggregation returns a CompositeAggregation. func NewCompositeAggregation() *CompositeAggregation { r := &CompositeAggregation{} diff --git a/typedapi/types/compositeaggregationsource.go b/typedapi/types/compositeaggregationsource.go old mode 100755 new mode 100644 index ad9c45b759..da2865e62b --- a/typedapi/types/compositeaggregationsource.go +++ b/typedapi/types/compositeaggregationsource.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CompositeAggregationSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L86-L91 type CompositeAggregationSource struct { DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` diff --git a/typedapi/types/compositebucket.go b/typedapi/types/compositebucket.go old mode 100755 new mode 100644 index 8e007a7195..521530282b --- a/typedapi/types/compositebucket.go +++ b/typedapi/types/compositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // CompositeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L624-L626 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L625-L627 type CompositeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` - Key map[string]FieldValue `json:"key"` + Key CompositeAggregateKey `json:"key"` } func (s *CompositeBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := 
NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": @@ -507,6 +78,519 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := 
NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s CompositeBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/conditiontokenfilter.go b/typedapi/types/conditiontokenfilter.go old mode 100755 new mode 100644 index 5458c8d86d..97c93ca1f9 --- a/typedapi/types/conditiontokenfilter.go +++ b/typedapi/types/conditiontokenfilter.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ConditionTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L180-L184 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L181-L185 type ConditionTokenFilter struct { Filter []string `json:"filter"` Script Script `json:"script"` @@ -30,6 +38,46 @@ type ConditionTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *ConditionTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewConditionTokenFilter returns a ConditionTokenFilter. 
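// In the rewritten CompositeBucket decoder above, sub-aggregations arrive as
// typed response keys of the form "type#name" and are stored in the Aggregations
// map under the plain name with a concrete aggregate type. A sketch of that
// behavior (not generated code), assuming the same types package and that a
// "max" aggregate named "price" is present in the response:
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"key":{"product":"shoe"},"doc_count":3,"max#price":{"value":42.0}}`)

	var bucket types.CompositeBucket
	if err := json.Unmarshal(payload, &bucket); err != nil {
		panic(err)
	}

	fmt.Println(bucket.DocCount) // 3

	// The "max#price" entry is decoded into a concrete aggregate and stored
	// under the plain name "price".
	agg, ok := bucket.Aggregations["price"]
	fmt.Println(ok)            // true
	fmt.Printf("%T\n", agg)    // *types.MaxAggregate
}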
func NewConditionTokenFilter() *ConditionTokenFilter { r := &ConditionTokenFilter{} diff --git a/typedapi/types/configuration.go b/typedapi/types/configuration.go old mode 100755 new mode 100644 index 103a50476b..5918cdcf64 --- a/typedapi/types/configuration.go +++ b/typedapi/types/configuration.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // Configuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L99-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L99-L129 type Configuration struct { // FeatureStates A list of feature states to be included in this snapshot. A list of features // available for inclusion in the snapshot and their descriptions be can be @@ -53,12 +59,100 @@ type Configuration struct { // Metadata Attaches arbitrary metadata to the snapshot, such as a record of who took the // snapshot, why it was taken, or any other useful data. Metadata must be less // than 1024 bytes. - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` // Partial If false, the entire snapshot will fail if one or more indices included in // the snapshot do not have all primary shards available. Partial *bool `json:"partial,omitempty"` } +func (s *Configuration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_states": + if err := dec.Decode(&s.FeatureStates); err != nil { + return err + } + + case "ignore_unavailable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnavailable = &value + case bool: + s.IgnoreUnavailable = &v + } + + case "include_global_state": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeGlobalState = &value + case bool: + s.IncludeGlobalState = &v + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "partial": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Partial = &value + case bool: + s.Partial = &v + } + + } + } + return nil +} + // NewConfiguration returns a Configuration. 
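// The Configuration decoder above normalizes "indices" (a single index name or
// an array both become a slice) and accepts booleans such as
// "ignore_unavailable" given as strings. A short sketch (not generated code),
// assuming the same types package:
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var a, b types.Configuration
	if err := json.Unmarshal([]byte(`{"indices":"logs-*","ignore_unavailable":"true"}`), &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal([]byte(`{"indices":["logs-*","metrics-*"],"ignore_unavailable":false}`), &b); err != nil {
		panic(err)
	}

	fmt.Println(len(a.Indices), *a.IgnoreUnavailable) // 1 true
	fmt.Println(len(b.Indices), *b.IgnoreUnavailable) // 2 false
}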
func NewConfiguration() *Configuration { r := &Configuration{} diff --git a/typedapi/types/configurations.go b/typedapi/types/configurations.go old mode 100755 new mode 100644 index e971f67556..0ebb2b032c --- a/typedapi/types/configurations.go +++ b/typedapi/types/configurations.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Configurations type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Phase.ts#L47-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Phase.ts#L47-L51 type Configurations struct { Forcemerge *ForceMergeConfiguration `json:"forcemerge,omitempty"` Rollover *RolloverConditions `json:"rollover,omitempty"` diff --git a/typedapi/types/confusionmatrixitem.go b/typedapi/types/confusionmatrixitem.go old mode 100755 new mode 100644 index abce92de1a..40f6f80cd7 --- a/typedapi/types/confusionmatrixitem.go +++ b/typedapi/types/confusionmatrixitem.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ConfusionMatrixItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L84-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L84-L89 type ConfusionMatrixItem struct { ActualClass string `json:"actual_class"` ActualClassDocCount int `json:"actual_class_doc_count"` @@ -30,6 +40,68 @@ type ConfusionMatrixItem struct { PredictedClasses []ConfusionMatrixPrediction `json:"predicted_classes"` } +func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actual_class": + if err := dec.Decode(&s.ActualClass); err != nil { + return err + } + + case "actual_class_doc_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ActualClassDocCount = value + case float64: + f := int(v) + s.ActualClassDocCount = f + } + + case "other_predicted_class_doc_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OtherPredictedClassDocCount = value + case float64: + f := int(v) + s.OtherPredictedClassDocCount = f + } + + case "predicted_classes": + if err := dec.Decode(&s.PredictedClasses); err != nil { + return err + } + + } + } + return nil +} + // NewConfusionMatrixItem returns a ConfusionMatrixItem. 
func NewConfusionMatrixItem() *ConfusionMatrixItem { r := &ConfusionMatrixItem{} diff --git a/typedapi/types/confusionmatrixprediction.go b/typedapi/types/confusionmatrixprediction.go old mode 100755 new mode 100644 index 421543bda0..53de7d4347 --- a/typedapi/types/confusionmatrixprediction.go +++ b/typedapi/types/confusionmatrixprediction.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ConfusionMatrixPrediction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L91-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L91-L94 type ConfusionMatrixPrediction struct { Count int `json:"count"` PredictedClass string `json:"predicted_class"` } +func (s *ConfusionMatrixPrediction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "predicted_class": + if err := dec.Decode(&s.PredictedClass); err != nil { + return err + } + + } + } + return nil +} + // NewConfusionMatrixPrediction returns a ConfusionMatrixPrediction. func NewConfusionMatrixPrediction() *ConfusionMatrixPrediction { r := &ConfusionMatrixPrediction{} diff --git a/typedapi/types/confusionmatrixthreshold.go b/typedapi/types/confusionmatrixthreshold.go old mode 100755 new mode 100644 index 344fd4b1f2..a59ecef400 --- a/typedapi/types/confusionmatrixthreshold.go +++ b/typedapi/types/confusionmatrixthreshold.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ConfusionMatrixThreshold type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L96-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L96-L117 type ConfusionMatrixThreshold struct { // FalseNegative False Negative FalseNegative int `json:"fn"` @@ -34,6 +44,90 @@ type ConfusionMatrixThreshold struct { TruePositive int `json:"tp"` } +func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fn": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FalseNegative = value + case float64: + f := int(v) + s.FalseNegative = f + } + + case "fp": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FalsePositive = value + case float64: + f := int(v) + s.FalsePositive = f + } + + case "tn": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TrueNegative = value + case float64: + f := int(v) + s.TrueNegative = f + } + + case "tp": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TruePositive = value + case float64: + f := int(v) + s.TruePositive = f + } + + } + } + return nil +} + // NewConfusionMatrixThreshold returns a ConfusionMatrixThreshold. func NewConfusionMatrixThreshold() *ConfusionMatrixThreshold { r := &ConfusionMatrixThreshold{} diff --git a/typedapi/types/connection.go b/typedapi/types/connection.go old mode 100755 new mode 100644 index 48542241c4..648b7d5a77 --- a/typedapi/types/connection.go +++ b/typedapi/types/connection.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Connection type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/Connection.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/Connection.ts#L22-L27 type Connection struct { DocCount int64 `json:"doc_count"` Source int64 `json:"source"` @@ -30,6 +40,87 @@ type Connection struct { Weight Float64 `json:"weight"` } +func (s *Connection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f + } + + case "source": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Source = value + case float64: + f := int64(v) + s.Source = f + } + + case "target": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Target = value + case float64: + f := int64(v) + s.Target = f + } + + case "weight": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Weight = f + case float64: + f := Float64(v) + s.Weight = f + } + + } + } + return nil +} + // NewConnection returns a Connection. func NewConnection() *Connection { r := &Connection{} diff --git a/typedapi/types/constantkeywordproperty.go b/typedapi/types/constantkeywordproperty.go old mode 100755 new mode 100644 index 744c785b97..3370105684 --- a/typedapi/types/constantkeywordproperty.go +++ b/typedapi/types/constantkeywordproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ConstantKeywordProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L44-L47 type ConstantKeywordProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -45,6 +47,7 @@ type ConstantKeywordProperty struct { } func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,6 +67,9 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -351,23 +357,40 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -655,7 +678,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/constantscorequery.go b/typedapi/types/constantscorequery.go old mode 100755 new mode 100644 index 578816e021..a72fef03a8 --- a/typedapi/types/constantscorequery.go +++ b/typedapi/types/constantscorequery.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ConstantScoreQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L42-L44 type ConstantScoreQuery struct { Boost *float32 `json:"boost,omitempty"` Filter *Query `json:"filter,omitempty"` QueryName_ *string `json:"_name,omitempty"` } +func (s *ConstantScoreQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewConstantScoreQuery returns a ConstantScoreQuery. func NewConstantScoreQuery() *ConstantScoreQuery { r := &ConstantScoreQuery{} diff --git a/typedapi/types/context.go b/typedapi/types/context.go old mode 100755 new mode 100644 index b8d0714fae..50af1bf6d4 --- a/typedapi/types/context.go +++ b/typedapi/types/context.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // GeoLocation // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L148-L153 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L152-L157 type Context interface{} diff --git a/typedapi/types/contextmethod.go b/typedapi/types/contextmethod.go old mode 100755 new mode 100644 index c1e80ec8cc..afb60dfdbb --- a/typedapi/types/contextmethod.go +++ b/typedapi/types/contextmethod.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ContextMethod type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script_context/types.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script_context/types.ts#L27-L31 type ContextMethod struct { Name string `json:"name"` Params []ContextMethodParam `json:"params"` ReturnType string `json:"return_type"` } +func (s *ContextMethod) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "params": + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "return_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ReturnType = o + + } + } + return nil +} + // NewContextMethod returns a ContextMethod. func NewContextMethod() *ContextMethod { r := &ContextMethod{} diff --git a/typedapi/types/contextmethodparam.go b/typedapi/types/contextmethodparam.go old mode 100755 new mode 100644 index 789489bc21..ae952c9a16 --- a/typedapi/types/contextmethodparam.go +++ b/typedapi/types/contextmethodparam.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ContextMethodParam type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script_context/types.ts#L33-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script_context/types.ts#L33-L36 type ContextMethodParam struct { Name string `json:"name"` Type string `json:"type"` } +func (s *ContextMethodParam) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewContextMethodParam returns a ContextMethodParam. func NewContextMethodParam() *ContextMethodParam { r := &ContextMethodParam{} diff --git a/typedapi/types/convertprocessor.go b/typedapi/types/convertprocessor.go old mode 100755 new mode 100644 index 6c0f48115d..b5b8c6d546 --- a/typedapi/types/convertprocessor.go +++ b/typedapi/types/convertprocessor.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/converttype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ConvertProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L147-L152 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L147-L152 type ConvertProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -39,6 +47,98 @@ type ConvertProcessor struct { Type converttype.ConvertType `json:"type"` } +func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewConvertProcessor returns a ConvertProcessor. func NewConvertProcessor() *ConvertProcessor { r := &ConvertProcessor{} diff --git a/typedapi/types/coordinatorstats.go b/typedapi/types/coordinatorstats.go old mode 100755 new mode 100644 index 61bce50d66..abddf52ea7 --- a/typedapi/types/coordinatorstats.go +++ b/typedapi/types/coordinatorstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CoordinatorStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/stats/types.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/stats/types.ts#L29-L35 type CoordinatorStats struct { ExecutedSearchesTotal int64 `json:"executed_searches_total"` NodeId string `json:"node_id"` @@ -31,6 +41,93 @@ type CoordinatorStats struct { RemoteRequestsTotal int64 `json:"remote_requests_total"` } +func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "executed_searches_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ExecutedSearchesTotal = value + case float64: + f := int64(v) + s.ExecutedSearchesTotal = f + } + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "queue_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.QueueSize = value + case float64: + f := int(v) + s.QueueSize = f + } + + case "remote_requests_current": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RemoteRequestsCurrent = value + case float64: + f := int(v) + s.RemoteRequestsCurrent = f + } + + case "remote_requests_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RemoteRequestsTotal = value + case float64: + f := int64(v) + s.RemoteRequestsTotal = f + } + + } + } + return nil +} + // NewCoordinatorStats returns a CoordinatorStats. func NewCoordinatorStats() *CoordinatorStats { r := &CoordinatorStats{} diff --git a/typedapi/types/coordsgeobounds.go b/typedapi/types/coordsgeobounds.go old mode 100755 new mode 100644 index c39a852516..4dab35be1d --- a/typedapi/types/coordsgeobounds.go +++ b/typedapi/types/coordsgeobounds.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CoordsGeoBounds type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L138-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L138-L143 type CoordsGeoBounds struct { Bottom Float64 `json:"bottom"` Left Float64 `json:"left"` @@ -30,6 +40,90 @@ type CoordsGeoBounds struct { Top Float64 `json:"top"` } +func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bottom": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Bottom = f + case float64: + f := Float64(v) + s.Bottom = f + } + + case "left": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Left = f + case float64: + f := Float64(v) + s.Left = f + } + + case "right": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Right = f + case float64: + f := Float64(v) + s.Right = f + } + + case "top": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Top = f + case float64: + f := Float64(v) + s.Top = f + } + + } + } + return nil +} + // NewCoordsGeoBounds returns a CoordsGeoBounds. func NewCoordsGeoBounds() *CoordsGeoBounds { r := &CoordsGeoBounds{} diff --git a/typedapi/types/coreknnquery.go b/typedapi/types/coreknnquery.go old mode 100755 new mode 100644 index 04a8dba52d..08249b5cf2 --- a/typedapi/types/coreknnquery.go +++ b/typedapi/types/coreknnquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CoreKnnQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/knn_search/_types/Knn.ts#L24-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/knn_search/_types/Knn.ts#L24-L33 type CoreKnnQuery struct { // Field The name of the vector field to search against Field string `json:"field"` @@ -34,6 +44,66 @@ type CoreKnnQuery struct { QueryVector []float32 `json:"query_vector"` } +func (s *CoreKnnQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "k": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.K = value + case float64: + f := int64(v) + s.K = f + } + + case "num_candidates": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumCandidates = value + case float64: + f := int64(v) + s.NumCandidates = f + } + + case "query_vector": + if err := dec.Decode(&s.QueryVector); err != nil { + return err + } + + } + } + return nil +} + // NewCoreKnnQuery returns a CoreKnnQuery. func NewCoreKnnQuery() *CoreKnnQuery { r := &CoreKnnQuery{} diff --git a/typedapi/types/counter.go b/typedapi/types/counter.go old mode 100755 new mode 100644 index e3c592268d..d30075fd57 --- a/typedapi/types/counter.go +++ b/typedapi/types/counter.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Counter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L35-L38 type Counter struct { Active int64 `json:"active"` Total int64 `json:"total"` } +func (s *Counter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Active = value + case float64: + f := int64(v) + s.Active = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewCounter returns a Counter. 
func NewCounter() *Counter { r := &Counter{} diff --git a/typedapi/types/countrecord.go b/typedapi/types/countrecord.go old mode 100755 new mode 100644 index af72424ae2..e7f8afce32 --- a/typedapi/types/countrecord.go +++ b/typedapi/types/countrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // CountRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/count/types.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/count/types.ts#L23-L39 type CountRecord struct { // Count the document count Count *string `json:"count,omitempty"` @@ -32,6 +40,44 @@ type CountRecord struct { Timestamp *string `json:"timestamp,omitempty"` } +func (s *CountRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count", "dc", "docs.count", "docsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Count = &o + + case "epoch", "t", "time": + if err := dec.Decode(&s.Epoch); err != nil { + return err + } + + case "timestamp", "ts", "hms", "hhmmss": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewCountRecord returns a CountRecord. func NewCountRecord() *CountRecord { r := &CountRecord{} diff --git a/typedapi/types/cpu.go b/typedapi/types/cpu.go old mode 100755 new mode 100644 index 9640cdf08f..4a136ce191 --- a/typedapi/types/cpu.go +++ b/typedapi/types/cpu.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Cpu type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L218-L227 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L218-L227 type Cpu struct { LoadAverage map[string]Float64 `json:"load_average,omitempty"` Percent *int `json:"percent,omitempty"` @@ -34,6 +44,80 @@ type Cpu struct { UserInMillis *int64 `json:"user_in_millis,omitempty"` } +func (s *Cpu) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "load_average": + if s.LoadAverage == nil { + s.LoadAverage = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.LoadAverage); err != nil { + return err + } + + case "percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Percent = &value + case float64: + f := int(v) + s.Percent = &f + } + + case "sys": + if err := dec.Decode(&s.Sys); err != nil { + return err + } + + case "sys_in_millis": + if err := dec.Decode(&s.SysInMillis); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + case "total_in_millis": + if err := dec.Decode(&s.TotalInMillis); err != nil { + return err + } + + case "user": + if err := dec.Decode(&s.User); err != nil { + return err + } + + case "user_in_millis": + if err := dec.Decode(&s.UserInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewCpu returns a Cpu. func NewCpu() *Cpu { r := &Cpu{ diff --git a/typedapi/types/cpuacct.go b/typedapi/types/cpuacct.go old mode 100755 new mode 100644 index fd4f731c75..eb1b006616 --- a/typedapi/types/cpuacct.go +++ b/typedapi/types/cpuacct.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // CpuAcct type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L194-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L194-L197 type CpuAcct struct { ControlGroup *string `json:"control_group,omitempty"` UsageNanos *int64 `json:"usage_nanos,omitempty"` } +func (s *CpuAcct) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "control_group": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ControlGroup = &o + + case "usage_nanos": + if err := dec.Decode(&s.UsageNanos); err != nil { + return err + } + + } + } + return nil +} + // NewCpuAcct returns a CpuAcct. 
func NewCpuAcct() *CpuAcct { r := &CpuAcct{} diff --git a/typedapi/types/createdstatus.go b/typedapi/types/createdstatus.go old mode 100755 new mode 100644 index 4b8eaafdf3..10ae1b518d --- a/typedapi/types/createdstatus.go +++ b/typedapi/types/createdstatus.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CreatedStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/CreatedStatus.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/CreatedStatus.ts#L20-L22 type CreatedStatus struct { Created bool `json:"created"` } +func (s *CreatedStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "created": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Created = value + case bool: + s.Created = v + } + + } + } + return nil +} + // NewCreatedStatus returns a CreatedStatus. func NewCreatedStatus() *CreatedStatus { r := &CreatedStatus{} diff --git a/typedapi/types/csvprocessor.go b/typedapi/types/csvprocessor.go old mode 100755 new mode 100644 index 660999d959..efc97b30d2 --- a/typedapi/types/csvprocessor.go +++ b/typedapi/types/csvprocessor.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // CsvProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L154-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L154-L162 type CsvProcessor struct { Description *string `json:"description,omitempty"` EmptyValue json.RawMessage `json:"empty_value,omitempty"` @@ -42,6 +48,139 @@ type CsvProcessor struct { Trim *bool `json:"trim,omitempty"` } +func (s *CsvProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "empty_value": + if err := dec.Decode(&s.EmptyValue); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "quote": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Quote = &o + + case "separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Separator = &o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.TargetFields = append(s.TargetFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.TargetFields); err != nil { + return err + } + } + + case "trim": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Trim = &value + case bool: + s.Trim = &v + } + + } + } + return nil +} + // NewCsvProcessor returns a CsvProcessor. func NewCsvProcessor() *CsvProcessor { r := &CsvProcessor{} diff --git a/typedapi/types/cumulativecardinalityaggregate.go b/typedapi/types/cumulativecardinalityaggregate.go old mode 100755 new mode 100644 index adde38a08d..b2dbeb00ec --- a/typedapi/types/cumulativecardinalityaggregate.go +++ b/typedapi/types/cumulativecardinalityaggregate.go @@ -16,21 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // CumulativeCardinalityAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L738-L746 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L747-L755 type CumulativeCardinalityAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Value int64 `json:"value"` - ValueAsString *string `json:"value_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Value int64 `json:"value"` + ValueAsString *string `json:"value_as_string,omitempty"` +} + +func (s *CumulativeCardinalityAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Value = value + case float64: + f := int64(v) + s.Value = f + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil } // NewCumulativeCardinalityAggregate returns a CumulativeCardinalityAggregate. diff --git a/typedapi/types/cumulativecardinalityaggregation.go b/typedapi/types/cumulativecardinalityaggregation.go old mode 100755 new mode 100644 index af6637288b..64aa683f38 --- a/typedapi/types/cumulativecardinalityaggregation.go +++ b/typedapi/types/cumulativecardinalityaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // CumulativeCardinalityAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L161-L161 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L161-L161 type CumulativeCardinalityAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/cumulativesumaggregation.go b/typedapi/types/cumulativesumaggregation.go old mode 100755 new mode 100644 index 90adb548cc..0ff15adf66 --- a/typedapi/types/cumulativesumaggregation.go +++ b/typedapi/types/cumulativesumaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // CumulativeSumAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L163-L163 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L163-L163 type CumulativeSumAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/currentnode.go b/typedapi/types/currentnode.go old mode 100755 new mode 100644 index d830b213b2..06645da6ab --- a/typedapi/types/currentnode.go +++ b/typedapi/types/currentnode.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CurrentNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L78-L84 type CurrentNode struct { Attributes map[string]string `json:"attributes"` Id string `json:"id"` @@ -31,6 +41,65 @@ type CurrentNode struct { WeightRanking int `json:"weight_ranking"` } +func (s *CurrentNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + case "weight_ranking": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.WeightRanking = value + case float64: + f := int(v) + s.WeightRanking = f + } + + } + } + return nil +} + // NewCurrentNode returns a CurrentNode. 
func NewCurrentNode() *CurrentNode { r := &CurrentNode{ diff --git a/typedapi/types/customanalyzer.go b/typedapi/types/customanalyzer.go old mode 100755 new mode 100644 index 7837f3d4ae..f9c0ac42e5 --- a/typedapi/types/customanalyzer.go +++ b/typedapi/types/customanalyzer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // CustomAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L28-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L28-L35 type CustomAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` @@ -32,6 +42,81 @@ type CustomAnalyzer struct { Type string `json:"type,omitempty"` } +func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "char_filter": + if err := dec.Decode(&s.CharFilter); err != nil { + return err + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "position_increment_gap": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PositionIncrementGap = &value + case float64: + f := int(v) + s.PositionIncrementGap = &f + } + + case "position_offset_gap": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PositionOffsetGap = &value + case float64: + f := int(v) + s.PositionOffsetGap = &f + } + + case "tokenizer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tokenizer = o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewCustomAnalyzer returns a CustomAnalyzer. func NewCustomAnalyzer() *CustomAnalyzer { r := &CustomAnalyzer{} diff --git a/typedapi/types/customcategorizetextanalyzer.go b/typedapi/types/customcategorizetextanalyzer.go old mode 100755 new mode 100644 index e7f6c161af..77f434ca96 --- a/typedapi/types/customcategorizetextanalyzer.go +++ b/typedapi/types/customcategorizetextanalyzer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CustomCategorizeTextAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L508-L512 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L508-L512 type CustomCategorizeTextAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` diff --git a/typedapi/types/customnormalizer.go b/typedapi/types/customnormalizer.go old mode 100755 new mode 100644 index d768de2bfd..dc07c3f27a --- a/typedapi/types/customnormalizer.go +++ b/typedapi/types/customnormalizer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // CustomNormalizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/normalizers.ts#L30-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/normalizers.ts#L30-L34 type CustomNormalizer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` diff --git a/typedapi/types/customsettings.go b/typedapi/types/customsettings.go old mode 100755 new mode 100644 index ce77ad5e3e..c35c012921 --- a/typedapi/types/customsettings.go +++ b/typedapi/types/customsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,5 +24,5 @@ import "encoding/json" // CustomSettings type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Settings.ts#L22-L27 -type CustomSettings json.RawMessage +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Settings.ts#L22-L27 +type CustomSettings = json.RawMessage diff --git a/typedapi/types/dailyschedule.go b/typedapi/types/dailyschedule.go old mode 100755 new mode 100644 index a1c454b55d..b59b102a4c --- a/typedapi/types/dailyschedule.go +++ b/typedapi/types/dailyschedule.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DailySchedule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L33-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L33-L35 type DailySchedule struct { At []ScheduleTimeOfDay `json:"at"` } +func (s *DailySchedule) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "at": + if err := dec.Decode(&s.At); err != nil { + return err + } + + } + } + return nil +} + // NewDailySchedule returns a DailySchedule. func NewDailySchedule() *DailySchedule { r := &DailySchedule{} diff --git a/typedapi/types/danglingindex.go b/typedapi/types/danglingindex.go old mode 100755 new mode 100644 index 4cbc480037..c5f6002ff1 --- a/typedapi/types/danglingindex.go +++ b/typedapi/types/danglingindex.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DanglingIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L29-L34 type DanglingIndex struct { CreationDateMillis int64 `json:"creation_date_millis"` IndexName string `json:"index_name"` @@ -30,6 +38,63 @@ type DanglingIndex struct { NodeIds []string `json:"node_ids"` } +func (s *DanglingIndex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "creation_date_millis": + if err := dec.Decode(&s.CreationDateMillis); err != nil { + return err + } + + case "index_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexName = o + + case "index_uuid": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexUuid = o + + case "node_ids": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.NodeIds = append(s.NodeIds, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.NodeIds); err != nil { + return err + } + } + + } + } + return nil +} + // NewDanglingIndex returns a DanglingIndex. func NewDanglingIndex() *DanglingIndex { r := &DanglingIndex{} diff --git a/typedapi/types/datacounts.go b/typedapi/types/datacounts.go old mode 100755 new mode 100644 index a9192fe8bf..24b55a61a8 --- a/typedapi/types/datacounts.go +++ b/typedapi/types/datacounts.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataCounts type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L129-L149 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L129-L149 type DataCounts struct { BucketCount int64 `json:"bucket_count"` EarliestRecordTimestamp *int64 `json:"earliest_record_timestamp,omitempty"` @@ -45,6 +55,301 @@ type DataCounts struct { SparseBucketCount int64 `json:"sparse_bucket_count"` } +func (s *DataCounts) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bucket_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BucketCount = value + case float64: + f := int64(v) + s.BucketCount = f + } + + case "earliest_record_timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EarliestRecordTimestamp = &value + case float64: + f := int64(v) + s.EarliestRecordTimestamp = &f + } + + case "empty_bucket_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EmptyBucketCount = value + case float64: + f := int64(v) + s.EmptyBucketCount = f + } + + case "input_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InputBytes = value + case float64: + f := int64(v) + s.InputBytes = f + } + + case "input_field_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InputFieldCount = value + case float64: + f := int64(v) + s.InputFieldCount = f + } + + case "input_record_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InputRecordCount = value + case float64: + f := int64(v) + s.InputRecordCount = f + } + + case "invalid_date_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InvalidDateCount = value + case float64: + f := int64(v) + s.InvalidDateCount = f + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "last_data_time": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LastDataTime = &value + case float64: + f := int64(v) + s.LastDataTime = &f + } + + case "latest_bucket_timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := 
strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LatestBucketTimestamp = &value + case float64: + f := int64(v) + s.LatestBucketTimestamp = &f + } + + case "latest_empty_bucket_timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LatestEmptyBucketTimestamp = &value + case float64: + f := int64(v) + s.LatestEmptyBucketTimestamp = &f + } + + case "latest_record_timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LatestRecordTimestamp = &value + case float64: + f := int64(v) + s.LatestRecordTimestamp = &f + } + + case "latest_sparse_bucket_timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LatestSparseBucketTimestamp = &value + case float64: + f := int64(v) + s.LatestSparseBucketTimestamp = &f + } + + case "log_time": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LogTime = &value + case float64: + f := int64(v) + s.LogTime = &f + } + + case "missing_field_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MissingFieldCount = value + case float64: + f := int64(v) + s.MissingFieldCount = f + } + + case "out_of_order_timestamp_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OutOfOrderTimestampCount = value + case float64: + f := int64(v) + s.OutOfOrderTimestampCount = f + } + + case "processed_field_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ProcessedFieldCount = value + case float64: + f := int64(v) + s.ProcessedFieldCount = f + } + + case "processed_record_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ProcessedRecordCount = value + case float64: + f := int64(v) + s.ProcessedRecordCount = f + } + + case "sparse_bucket_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SparseBucketCount = value + case float64: + f := int64(v) + s.SparseBucketCount = f + } + + } + } + return nil +} + // NewDataCounts returns a DataCounts. func NewDataCounts() *DataCounts { r := &DataCounts{} diff --git a/typedapi/types/datadescription.go b/typedapi/types/datadescription.go old mode 100755 new mode 100644 index 629611001f..904d1bd85f --- a/typedapi/types/datadescription.go +++ b/typedapi/types/datadescription.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
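DataCounts.UnmarshalJSON above accepts each counter either as a JSON number or as a string-encoded integer, converting strings with strconv.ParseInt. A small sketch of that tolerance, not part of the generated code; the types import path is assumed from this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var counts types.DataCounts

	// bucket_count arrives as a quoted string, input_bytes as a plain number;
	// both land in int64 fields via the generated switch cases.
	payload := []byte(`{"job_id":"my-job","bucket_count":"42","input_bytes":1024}`)
	if err := json.Unmarshal(payload, &counts); err != nil {
		panic(err)
	}

	fmt.Println(counts.BucketCount, counts.InputBytes) // 42 1024
}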
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataDescription type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L151-L167 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L151-L167 type DataDescription struct { FieldDelimiter *string `json:"field_delimiter,omitempty"` // Format Only JSON format is supported at this time. @@ -41,6 +49,55 @@ type DataDescription struct { TimeFormat *string `json:"time_format,omitempty"` } +func (s *DataDescription) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field_delimiter": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FieldDelimiter = &o + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "time_field": + if err := dec.Decode(&s.TimeField); err != nil { + return err + } + + case "time_format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TimeFormat = &o + + } + } + return nil +} + // NewDataDescription returns a DataDescription. func NewDataDescription() *DataDescription { r := &DataDescription{} diff --git a/typedapi/types/dataemailattachment.go b/typedapi/types/dataemailattachment.go old mode 100755 new mode 100644 index 1376a18b91..75fc555fa4 --- a/typedapi/types/dataemailattachment.go +++ b/typedapi/types/dataemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // DataEmailAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L234-L236 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L234-L236 type DataEmailAttachment struct { Format *dataattachmentformat.DataAttachmentFormat `json:"format,omitempty"` } diff --git a/typedapi/types/datafeedauthorization.go b/typedapi/types/datafeedauthorization.go old mode 100755 new mode 100644 index 69aebe7f6e..d7e514f34c --- a/typedapi/types/datafeedauthorization.go +++ b/typedapi/types/datafeedauthorization.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DatafeedAuthorization type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Authorization.ts#L31-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Authorization.ts#L31-L43 type DatafeedAuthorization struct { // ApiKey If an API key was used for the most recent update to the datafeed, its name // and identifier are listed in the response. diff --git a/typedapi/types/datafeedconfig.go b/typedapi/types/datafeedconfig.go old mode 100755 new mode 100644 index cf02a7c699..c266663011 --- a/typedapi/types/datafeedconfig.go +++ b/typedapi/types/datafeedconfig.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DatafeedConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L60-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L60-L117 type DatafeedConfig struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations // is limited and should be used only with low cardinality data. @@ -81,7 +91,7 @@ type DatafeedConfig struct { // performance when there are multiple jobs running on the same node. QueryDelay Duration `json:"query_delay,omitempty"` // RuntimeMappings Specifies runtime fields for the datafeed search. - RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings RuntimeFields `json:"runtime_mappings,omitempty"` // ScriptFields Specifies scripts that evaluate custom expressions and returns script fields // to the datafeed. The detector configuration objects in a job can contain // functions that use these script fields. 
@@ -92,6 +102,124 @@ type DatafeedConfig struct { ScrollSize *int `json:"scroll_size,omitempty"` } +func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations", "aggs": + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregations, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return err + } + + case "chunking_config": + if err := dec.Decode(&s.ChunkingConfig); err != nil { + return err + } + + case "datafeed_id": + if err := dec.Decode(&s.DatafeedId); err != nil { + return err + } + + case "delayed_data_check_config": + if err := dec.Decode(&s.DelayedDataCheckConfig); err != nil { + return err + } + + case "frequency": + if err := dec.Decode(&s.Frequency); err != nil { + return err + } + + case "indices", "indexes": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "indices_options": + if err := dec.Decode(&s.IndicesOptions); err != nil { + return err + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "max_empty_searches": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxEmptySearches = &value + case float64: + f := int(v) + s.MaxEmptySearches = &f + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "query_delay": + if err := dec.Decode(&s.QueryDelay); err != nil { + return err + } + + case "runtime_mappings": + if err := dec.Decode(&s.RuntimeMappings); err != nil { + return err + } + + case "script_fields": + if s.ScriptFields == nil { + s.ScriptFields = make(map[string]ScriptField, 0) + } + if err := dec.Decode(&s.ScriptFields); err != nil { + return err + } + + case "scroll_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ScrollSize = &value + case float64: + f := int(v) + s.ScrollSize = &f + } + + } + } + return nil +} + // NewDatafeedConfig returns a DatafeedConfig. func NewDatafeedConfig() *DatafeedConfig { r := &DatafeedConfig{ diff --git a/typedapi/types/datafeedrunningstate.go b/typedapi/types/datafeedrunningstate.go old mode 100755 new mode 100644 index 26a4fcef57..eae9d918fb --- a/typedapi/types/datafeedrunningstate.go +++ b/typedapi/types/datafeedrunningstate.go @@ -16,19 +16,82 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DatafeedRunningState type. 
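The DatafeedConfig.UnmarshalJSON above also accepts the abbreviated keys "aggs" and "indexes" as aliases for "aggregations" and "indices", and parses scroll_size and max_empty_searches from either numbers or strings. A sketch, not part of the generated code; the types import path is assumed from this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var cfg types.DatafeedConfig

	// "indexes" is treated like "indices", and the quoted scroll_size is
	// parsed with strconv.Atoi by the custom decoder.
	payload := []byte(`{"indexes":["logs-*"],"scroll_size":"500"}`)
	if err := json.Unmarshal(payload, &cfg); err != nil {
		panic(err)
	}

	fmt.Println(*cfg.ScrollSize) // 500
}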
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L158-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L158-L162 type DatafeedRunningState struct { RealTimeConfigured bool `json:"real_time_configured"` RealTimeRunning bool `json:"real_time_running"` SearchInterval *RunningStateSearchInterval `json:"search_interval,omitempty"` } +func (s *DatafeedRunningState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "real_time_configured": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.RealTimeConfigured = value + case bool: + s.RealTimeConfigured = v + } + + case "real_time_running": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.RealTimeRunning = value + case bool: + s.RealTimeRunning = v + } + + case "search_interval": + if err := dec.Decode(&s.SearchInterval); err != nil { + return err + } + + } + } + return nil +} + // NewDatafeedRunningState returns a DatafeedRunningState. func NewDatafeedRunningState() *DatafeedRunningState { r := &DatafeedRunningState{} diff --git a/typedapi/types/datafeeds.go b/typedapi/types/datafeeds.go old mode 100755 new mode 100644 index 71ea45986a..75e57b9657 --- a/typedapi/types/datafeeds.go +++ b/typedapi/types/datafeeds.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Datafeeds type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/info/types.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/info/types.ts#L40-L42 type Datafeeds struct { ScrollSize int `json:"scroll_size"` } +func (s *Datafeeds) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "scroll_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ScrollSize = value + case float64: + f := int(v) + s.ScrollSize = f + } + + } + } + return nil +} + // NewDatafeeds returns a Datafeeds. func NewDatafeeds() *Datafeeds { r := &Datafeeds{} diff --git a/typedapi/types/datafeedsrecord.go b/typedapi/types/datafeedsrecord.go old mode 100755 new mode 100644 index 016951fba8..7589e9f4ec --- a/typedapi/types/datafeedsrecord.go +++ b/typedapi/types/datafeedsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
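DatafeedRunningState.UnmarshalJSON above tolerates booleans that arrive as the strings "true"/"false" as well as native JSON booleans. An illustrative sketch, not part of the generated code; the types import path is assumed from this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var state types.DatafeedRunningState

	// real_time_configured is a quoted string, real_time_running a real bool;
	// both are handled by the string/bool switch in the custom unmarshaller.
	payload := []byte(`{"real_time_configured":"true","real_time_running":false}`)
	if err := json.Unmarshal(payload, &state); err != nil {
		panic(err)
	}

	fmt.Println(state.RealTimeConfigured, state.RealTimeRunning) // true false
}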
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // DatafeedsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_datafeeds/types.ts#L22-L83 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_datafeeds/types.ts#L22-L83 type DatafeedsRecord struct { // AssignmentExplanation why the datafeed is or is not assigned to a node AssignmentExplanation *string `json:"assignment_explanation,omitempty"` diff --git a/typedapi/types/datafeedstats.go b/typedapi/types/datafeedstats.go old mode 100755 new mode 100644 index e4f072b5e7..da33f2e35e --- a/typedapi/types/datafeedstats.go +++ b/typedapi/types/datafeedstats.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/datafeedstate" + + "bytes" + "errors" + "io" + + "encoding/json" ) // DatafeedStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L140-L147 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L140-L147 type DatafeedStats struct { AssignmentExplanation *string `json:"assignment_explanation,omitempty"` DatafeedId string `json:"datafeed_id"` @@ -36,6 +42,59 @@ type DatafeedStats struct { TimingStats DatafeedTimingStats `json:"timing_stats"` } +func (s *DatafeedStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_explanation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentExplanation = &o + + case "datafeed_id": + if err := dec.Decode(&s.DatafeedId); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "running_state": + if err := dec.Decode(&s.RunningState); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "timing_stats": + if err := dec.Decode(&s.TimingStats); err != nil { + return err + } + + } + } + return nil +} + // NewDatafeedStats returns a DatafeedStats. func NewDatafeedStats() *DatafeedStats { r := &DatafeedStats{} diff --git a/typedapi/types/datafeedtimingstats.go b/typedapi/types/datafeedtimingstats.go old mode 100755 new mode 100644 index 02bf9e298a..3d2464cd41 --- a/typedapi/types/datafeedtimingstats.go +++ b/typedapi/types/datafeedtimingstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DatafeedTimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L149-L156 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L149-L156 type DatafeedTimingStats struct { AverageSearchTimePerBucketMs Float64 `json:"average_search_time_per_bucket_ms,omitempty"` BucketCount int64 `json:"bucket_count"` @@ -32,6 +42,76 @@ type DatafeedTimingStats struct { TotalSearchTimeMs Float64 `json:"total_search_time_ms"` } +func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "average_search_time_per_bucket_ms": + if err := dec.Decode(&s.AverageSearchTimePerBucketMs); err != nil { + return err + } + + case "bucket_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BucketCount = value + case float64: + f := int64(v) + s.BucketCount = f + } + + case "exponential_average_search_time_per_hour_ms": + if err := dec.Decode(&s.ExponentialAverageSearchTimePerHourMs); err != nil { + return err + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "search_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SearchCount = value + case float64: + f := int64(v) + s.SearchCount = f + } + + case "total_search_time_ms": + if err := dec.Decode(&s.TotalSearchTimeMs); err != nil { + return err + } + + } + } + return nil +} + // NewDatafeedTimingStats returns a DatafeedTimingStats. func NewDatafeedTimingStats() *DatafeedTimingStats { r := &DatafeedTimingStats{} diff --git a/typedapi/types/dataframeanalysis.go b/typedapi/types/dataframeanalysis.go old mode 100755 new mode 100644 index b0045758ff..9687810028 --- a/typedapi/types/dataframeanalysis.go +++ b/typedapi/types/dataframeanalysis.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L134-L213 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L134-L213 type DataframeAnalysis struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -139,6 +149,271 @@ type DataframeAnalysis struct { TrainingPercent Percentage `json:"training_percent,omitempty"` } +func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Alpha = &f + case float64: + f := Float64(v) + s.Alpha = &f + } + + case "dependent_variable": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DependentVariable = o + + case "downsample_factor": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.DownsampleFactor = &f + case float64: + f := Float64(v) + s.DownsampleFactor = &f + } + + case "early_stopping_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EarlyStoppingEnabled = &value + case bool: + s.EarlyStoppingEnabled = &v + } + + case "eta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Eta = &f + case float64: + f := Float64(v) + s.Eta = &f + } + + case "eta_growth_rate_per_tree": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.EtaGrowthRatePerTree = &f + case float64: + f := Float64(v) + s.EtaGrowthRatePerTree = &f + } + + case "feature_bag_fraction": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.FeatureBagFraction = &f + case float64: + f := Float64(v) + s.FeatureBagFraction = &f + } + + case "feature_processors": + if err := dec.Decode(&s.FeatureProcessors); err != nil { + return err + } + + case "gamma": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Gamma = &f + case float64: + f := Float64(v) + s.Gamma = &f + } + + case "lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lambda = &f + case float64: + f := Float64(v) + s.Lambda = &f + } + + case "max_optimization_rounds_per_hyperparameter": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOptimizationRoundsPerHyperparameter = &value + case float64: + f := int(v) + s.MaxOptimizationRoundsPerHyperparameter = &f + } + + case "max_trees", "maximum_number_trees": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } 
+ s.MaxTrees = &value + case float64: + f := int(v) + s.MaxTrees = &f + } + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "prediction_field_name": + if err := dec.Decode(&s.PredictionFieldName); err != nil { + return err + } + + case "randomize_seed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RandomizeSeed = &f + case float64: + f := Float64(v) + s.RandomizeSeed = &f + } + + case "soft_tree_depth_limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SoftTreeDepthLimit = &value + case float64: + f := int(v) + s.SoftTreeDepthLimit = &f + } + + case "soft_tree_depth_tolerance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.SoftTreeDepthTolerance = &f + case float64: + f := Float64(v) + s.SoftTreeDepthTolerance = &f + } + + case "training_percent": + if err := dec.Decode(&s.TrainingPercent); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysis returns a DataframeAnalysis. func NewDataframeAnalysis() *DataframeAnalysis { r := &DataframeAnalysis{} diff --git a/typedapi/types/dataframeanalysisanalyzedfields.go b/typedapi/types/dataframeanalysisanalyzedfields.go old mode 100755 new mode 100644 index f6f9000514..4a065efda1 --- a/typedapi/types/dataframeanalysisanalyzedfields.go +++ b/typedapi/types/dataframeanalysisanalyzedfields.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalysisAnalyzedFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L238-L244 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L238-L244 type DataframeAnalysisAnalyzedFields struct { // Excludes An array of strings that defines the fields that will be included in the // analysis. 
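DataframeAnalysis.UnmarshalJSON above recognizes "maximum_number_trees" as an alias for "max_trees" and, like the other decoders in this diff, accepts hyperparameters as numbers or numeric strings. A sketch, not part of the generated code; the types import path is assumed from this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var analysis types.DataframeAnalysis

	// The alias key and the string-encoded eta are both handled by the
	// generated switch cases above.
	payload := []byte(`{"dependent_variable":"price","maximum_number_trees":300,"eta":"0.05"}`)
	if err := json.Unmarshal(payload, &analysis); err != nil {
		panic(err)
	}

	fmt.Println(*analysis.MaxTrees, *analysis.Eta) // 300 0.05
}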
@@ -33,6 +41,41 @@ type DataframeAnalysisAnalyzedFields struct { Includes []string `json:"includes"` } +func (s *DataframeAnalysisAnalyzedFields) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Includes) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "excludes": + if err := dec.Decode(&s.Excludes); err != nil { + return err + } + + case "includes": + if err := dec.Decode(&s.Includes); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysisAnalyzedFields returns a DataframeAnalysisAnalyzedFields. func NewDataframeAnalysisAnalyzedFields() *DataframeAnalysisAnalyzedFields { r := &DataframeAnalysisAnalyzedFields{} diff --git a/typedapi/types/dataframeanalysisclassification.go b/typedapi/types/dataframeanalysisclassification.go old mode 100755 new mode 100644 index 0a88dc3069..7288c77594 --- a/typedapi/types/dataframeanalysisclassification.go +++ b/typedapi/types/dataframeanalysisclassification.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalysisClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L227-L236 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L227-L236 type DataframeAnalysisClassification struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -148,6 +158,295 @@ type DataframeAnalysisClassification struct { TrainingPercent Percentage `json:"training_percent,omitempty"` } +func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Alpha = &f + case float64: + f := Float64(v) + s.Alpha = &f + } + + case "class_assignment_objective": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClassAssignmentObjective = &o + + case "dependent_variable": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DependentVariable = o + + case "downsample_factor": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.DownsampleFactor = &f + case float64: + f := Float64(v) + s.DownsampleFactor = &f + } + + case "early_stopping_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EarlyStoppingEnabled = &value + case bool: + s.EarlyStoppingEnabled = &v + } + + case "eta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Eta = &f + case float64: + f := Float64(v) + s.Eta = &f + } + + case "eta_growth_rate_per_tree": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.EtaGrowthRatePerTree = &f + case float64: + f := Float64(v) + s.EtaGrowthRatePerTree = &f + } + + case "feature_bag_fraction": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.FeatureBagFraction = &f + case float64: + f := Float64(v) + s.FeatureBagFraction = &f + } + + case "feature_processors": + if err := dec.Decode(&s.FeatureProcessors); err != nil { + return err + } + + case "gamma": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Gamma = &f + case float64: + f := Float64(v) + s.Gamma = &f + } + + case "lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lambda = &f + case float64: + f := Float64(v) + s.Lambda = &f + } + + case "max_optimization_rounds_per_hyperparameter": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOptimizationRoundsPerHyperparameter = &value + case float64: + f := int(v) + s.MaxOptimizationRoundsPerHyperparameter = 
&f + } + + case "max_trees", "maximum_number_trees": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTrees = &value + case float64: + f := int(v) + s.MaxTrees = &f + } + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "prediction_field_name": + if err := dec.Decode(&s.PredictionFieldName); err != nil { + return err + } + + case "randomize_seed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RandomizeSeed = &f + case float64: + f := Float64(v) + s.RandomizeSeed = &f + } + + case "soft_tree_depth_limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SoftTreeDepthLimit = &value + case float64: + f := int(v) + s.SoftTreeDepthLimit = &f + } + + case "soft_tree_depth_tolerance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.SoftTreeDepthTolerance = &f + case float64: + f := Float64(v) + s.SoftTreeDepthTolerance = &f + } + + case "training_percent": + if err := dec.Decode(&s.TrainingPercent); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysisClassification returns a DataframeAnalysisClassification. func NewDataframeAnalysisClassification() *DataframeAnalysisClassification { r := &DataframeAnalysisClassification{} diff --git a/typedapi/types/dataframeanalysiscontainer.go b/typedapi/types/dataframeanalysiscontainer.go old mode 100755 new mode 100644 index 788541739d..2cf350c597 --- a/typedapi/types/dataframeanalysiscontainer.go +++ b/typedapi/types/dataframeanalysiscontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeAnalysisContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L84-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L84-L101 type DataframeAnalysisContainer struct { // Classification The configuration information necessary to perform classification. 
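DataframeAnalysisAnalyzedFields.UnmarshalJSON, added earlier in this diff, treats a bare JSON array as shorthand for the includes list, while the object form sets includes and excludes explicitly. A sketch, not part of the generated code; the types import path is assumed from this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var short, full types.DataframeAnalysisAnalyzedFields

	// A bare array is routed straight into Includes by the bytes.HasPrefix check.
	if err := json.Unmarshal([]byte(`["field_a","field_b"]`), &short); err != nil {
		panic(err)
	}

	// The object form populates both slices.
	if err := json.Unmarshal([]byte(`{"includes":["field_a"],"excludes":["raw_*"]}`), &full); err != nil {
		panic(err)
	}

	fmt.Println(short.Includes, full.Excludes) // [field_a field_b] [raw_*]
}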
Classification *DataframeAnalysisClassification `json:"classification,omitempty"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessor.go b/typedapi/types/dataframeanalysisfeatureprocessor.go old mode 100755 new mode 100644 index eaf15a81c0..df18de6ff8 --- a/typedapi/types/dataframeanalysisfeatureprocessor.go +++ b/typedapi/types/dataframeanalysisfeatureprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeAnalysisFeatureProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L246-L258 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L246-L258 type DataframeAnalysisFeatureProcessor struct { // FrequencyEncoding The configuration information necessary to perform frequency encoding. FrequencyEncoding *DataframeAnalysisFeatureProcessorFrequencyEncoding `json:"frequency_encoding,omitempty"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go b/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go old mode 100755 new mode 100644 index 477a33445c..83b93df45f --- a/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalysisFeatureProcessorFrequencyEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L260-L267 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L260-L267 type DataframeAnalysisFeatureProcessorFrequencyEncoding struct { // FeatureName The resulting feature name. FeatureName string `json:"feature_name"` @@ -32,6 +40,44 @@ type DataframeAnalysisFeatureProcessorFrequencyEncoding struct { FrequencyMap map[string]Float64 `json:"frequency_map"` } +func (s *DataframeAnalysisFeatureProcessorFrequencyEncoding) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_name": + if err := dec.Decode(&s.FeatureName); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "frequency_map": + if s.FrequencyMap == nil { + s.FrequencyMap = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.FrequencyMap); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysisFeatureProcessorFrequencyEncoding returns a DataframeAnalysisFeatureProcessorFrequencyEncoding. 
func NewDataframeAnalysisFeatureProcessorFrequencyEncoding() *DataframeAnalysisFeatureProcessorFrequencyEncoding { r := &DataframeAnalysisFeatureProcessorFrequencyEncoding{ diff --git a/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go b/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go old mode 100755 new mode 100644 index bbbd0f475f..b647015ad3 --- a/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go @@ -16,18 +16,51 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalysisFeatureProcessorMultiEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L269-L272 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L269-L272 type DataframeAnalysisFeatureProcessorMultiEncoding struct { // Processors The ordered array of custom processors to execute. Must be more than 1. Processors []int `json:"processors"` } +func (s *DataframeAnalysisFeatureProcessorMultiEncoding) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "processors": + if err := dec.Decode(&s.Processors); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysisFeatureProcessorMultiEncoding returns a DataframeAnalysisFeatureProcessorMultiEncoding. func NewDataframeAnalysisFeatureProcessorMultiEncoding() *DataframeAnalysisFeatureProcessorMultiEncoding { r := &DataframeAnalysisFeatureProcessorMultiEncoding{} diff --git a/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go b/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go old mode 100755 new mode 100644 index ba000c98f7..898d6d2f19 --- a/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalysisFeatureProcessorNGramEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L274-L286 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L274-L286 type DataframeAnalysisFeatureProcessorNGramEncoding struct { Custom *bool `json:"custom,omitempty"` // FeaturePrefix The feature name prefix. Defaults to ngram__. 
@@ -40,6 +50,90 @@ type DataframeAnalysisFeatureProcessorNGramEncoding struct { Start *int `json:"start,omitempty"` } +func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "custom": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Custom = &value + case bool: + s.Custom = &v + } + + case "feature_prefix": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeaturePrefix = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Length = &value + case float64: + f := int(v) + s.Length = &f + } + + case "n_grams": + if err := dec.Decode(&s.NGrams); err != nil { + return err + } + + case "start": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Start = &value + case float64: + f := int(v) + s.Start = &f + } + + } + } + return nil +} + // NewDataframeAnalysisFeatureProcessorNGramEncoding returns a DataframeAnalysisFeatureProcessorNGramEncoding. func NewDataframeAnalysisFeatureProcessorNGramEncoding() *DataframeAnalysisFeatureProcessorNGramEncoding { r := &DataframeAnalysisFeatureProcessorNGramEncoding{} diff --git a/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go b/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go old mode 100755 new mode 100644 index ca716c9b99..5920ad69ac --- a/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalysisFeatureProcessorOneHotEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L288-L293 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L288-L293 type DataframeAnalysisFeatureProcessorOneHotEncoding struct { // Field The name of the field to encode. 
Field string `json:"field"` @@ -30,6 +38,39 @@ type DataframeAnalysisFeatureProcessorOneHotEncoding struct { HotMap string `json:"hot_map"` } +func (s *DataframeAnalysisFeatureProcessorOneHotEncoding) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "hot_map": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HotMap = o + + } + } + return nil +} + // NewDataframeAnalysisFeatureProcessorOneHotEncoding returns a DataframeAnalysisFeatureProcessorOneHotEncoding. func NewDataframeAnalysisFeatureProcessorOneHotEncoding() *DataframeAnalysisFeatureProcessorOneHotEncoding { r := &DataframeAnalysisFeatureProcessorOneHotEncoding{} diff --git a/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go b/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go old mode 100755 new mode 100644 index 66bbccc417..df4c88a16c --- a/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // DataframeAnalysisFeatureProcessorTargetMeanEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L295-L304 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L295-L304 type DataframeAnalysisFeatureProcessorTargetMeanEncoding struct { // DefaultValue The default value if field value is not found in the target_map. DefaultValue int `json:"default_value"` @@ -38,6 +44,60 @@ type DataframeAnalysisFeatureProcessorTargetMeanEncoding struct { TargetMap map[string]json.RawMessage `json:"target_map"` } +func (s *DataframeAnalysisFeatureProcessorTargetMeanEncoding) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "default_value": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DefaultValue = value + case float64: + f := int(v) + s.DefaultValue = f + } + + case "feature_name": + if err := dec.Decode(&s.FeatureName); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "target_map": + if s.TargetMap == nil { + s.TargetMap = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.TargetMap); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysisFeatureProcessorTargetMeanEncoding returns a DataframeAnalysisFeatureProcessorTargetMeanEncoding. 
func NewDataframeAnalysisFeatureProcessorTargetMeanEncoding() *DataframeAnalysisFeatureProcessorTargetMeanEncoding { r := &DataframeAnalysisFeatureProcessorTargetMeanEncoding{ diff --git a/typedapi/types/dataframeanalysisoutlierdetection.go b/typedapi/types/dataframeanalysisoutlierdetection.go old mode 100755 new mode 100644 index 66bbe6745d..c6cd9021f1 --- a/typedapi/types/dataframeanalysisoutlierdetection.go +++ b/typedapi/types/dataframeanalysisoutlierdetection.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalysisOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L103-L132 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L103-L132 type DataframeAnalysisOutlierDetection struct { // ComputeFeatureInfluence Specifies whether the feature influence calculation is enabled. ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` @@ -50,6 +60,110 @@ type DataframeAnalysisOutlierDetection struct { StandardizationEnabled *bool `json:"standardization_enabled,omitempty"` } +func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compute_feature_influence": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ComputeFeatureInfluence = &value + case bool: + s.ComputeFeatureInfluence = &v + } + + case "feature_influence_threshold": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.FeatureInfluenceThreshold = &f + case float64: + f := Float64(v) + s.FeatureInfluenceThreshold = &f + } + + case "method": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Method = &o + + case "n_neighbors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NNeighbors = &value + case float64: + f := int(v) + s.NNeighbors = &f + } + + case "outlier_fraction": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.OutlierFraction = &f + case float64: + f := Float64(v) + s.OutlierFraction = &f + } + + case "standardization_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StandardizationEnabled = &value + case bool: + s.StandardizationEnabled = &v + } + + } + } + return nil +} + // NewDataframeAnalysisOutlierDetection returns a DataframeAnalysisOutlierDetection. 
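DataframeAnalysisOutlierDetection.UnmarshalJSON above applies the same leniency across field kinds: booleans, integers, and floats may each arrive as strings. An illustrative sketch, not part of the generated code; the types import path is assumed from this diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var od types.DataframeAnalysisOutlierDetection

	payload := []byte(`{"compute_feature_influence":"true","n_neighbors":"20","outlier_fraction":0.05}`)
	if err := json.Unmarshal(payload, &od); err != nil {
		panic(err)
	}

	// Quoted values are parsed with strconv; the plain number decodes directly.
	fmt.Println(*od.ComputeFeatureInfluence, *od.NNeighbors, *od.OutlierFraction) // true 20 0.05
}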
func NewDataframeAnalysisOutlierDetection() *DataframeAnalysisOutlierDetection { r := &DataframeAnalysisOutlierDetection{} diff --git a/typedapi/types/dataframeanalysisregression.go b/typedapi/types/dataframeanalysisregression.go old mode 100755 new mode 100644 index 61f3b1a3ed..44df286583 --- a/typedapi/types/dataframeanalysisregression.go +++ b/typedapi/types/dataframeanalysisregression.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalysisRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L215-L225 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L215-L225 type DataframeAnalysisRegression struct { // Alpha Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -145,6 +155,295 @@ type DataframeAnalysisRegression struct { TrainingPercent Percentage `json:"training_percent,omitempty"` } +func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Alpha = &f + case float64: + f := Float64(v) + s.Alpha = &f + } + + case "dependent_variable": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DependentVariable = o + + case "downsample_factor": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.DownsampleFactor = &f + case float64: + f := Float64(v) + s.DownsampleFactor = &f + } + + case "early_stopping_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EarlyStoppingEnabled = &value + case bool: + s.EarlyStoppingEnabled = &v + } + + case "eta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Eta = &f + case float64: + f := Float64(v) + s.Eta = &f + } + + case "eta_growth_rate_per_tree": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.EtaGrowthRatePerTree = &f + case float64: + f := Float64(v) + s.EtaGrowthRatePerTree = &f + } + + case "feature_bag_fraction": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + 
s.FeatureBagFraction = &f + case float64: + f := Float64(v) + s.FeatureBagFraction = &f + } + + case "feature_processors": + if err := dec.Decode(&s.FeatureProcessors); err != nil { + return err + } + + case "gamma": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Gamma = &f + case float64: + f := Float64(v) + s.Gamma = &f + } + + case "lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lambda = &f + case float64: + f := Float64(v) + s.Lambda = &f + } + + case "loss_function": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LossFunction = &o + + case "loss_function_parameter": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.LossFunctionParameter = &f + case float64: + f := Float64(v) + s.LossFunctionParameter = &f + } + + case "max_optimization_rounds_per_hyperparameter": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOptimizationRoundsPerHyperparameter = &value + case float64: + f := int(v) + s.MaxOptimizationRoundsPerHyperparameter = &f + } + + case "max_trees", "maximum_number_trees": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTrees = &value + case float64: + f := int(v) + s.MaxTrees = &f + } + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "prediction_field_name": + if err := dec.Decode(&s.PredictionFieldName); err != nil { + return err + } + + case "randomize_seed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RandomizeSeed = &f + case float64: + f := Float64(v) + s.RandomizeSeed = &f + } + + case "soft_tree_depth_limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SoftTreeDepthLimit = &value + case float64: + f := int(v) + s.SoftTreeDepthLimit = &f + } + + case "soft_tree_depth_tolerance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.SoftTreeDepthTolerance = &f + case float64: + f := Float64(v) + s.SoftTreeDepthTolerance = &f + } + + case "training_percent": + if err := dec.Decode(&s.TrainingPercent); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalysisRegression returns a DataframeAnalysisRegression. 
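The regression decoder above also maps the legacy maximum_number_trees key onto MaxTrees alongside max_trees. A small sketch, again assuming the github.com/elastic/go-elasticsearch/v8 module path and not part of the generated code:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The alias key and the string-encoded integer are both tolerated.
	raw := []byte(`{"dependent_variable":"price","maximum_number_trees":"100","eta":0.3}`)

	var reg types.DataframeAnalysisRegression
	if err := json.Unmarshal(raw, &reg); err != nil {
		panic(err)
	}
	fmt.Println(*reg.MaxTrees, *reg.Eta) // 100 0.3
}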
func NewDataframeAnalysisRegression() *DataframeAnalysisRegression { r := &DataframeAnalysisRegression{} diff --git a/typedapi/types/dataframeanalytics.go b/typedapi/types/dataframeanalytics.go old mode 100755 new mode 100644 index a7ab2244a7..b30d794463 --- a/typedapi/types/dataframeanalytics.go +++ b/typedapi/types/dataframeanalytics.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/dataframestate" + + "bytes" + "errors" + "io" + + "encoding/json" ) // DataframeAnalytics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L324-L341 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L324-L341 type DataframeAnalytics struct { // AnalysisStats An object containing information about the analysis job. AnalysisStats *DataframeAnalyticsStatsContainer `json:"analysis_stats,omitempty"` @@ -51,6 +57,69 @@ type DataframeAnalytics struct { State dataframestate.DataframeState `json:"state"` } +func (s *DataframeAnalytics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analysis_stats": + if err := dec.Decode(&s.AnalysisStats); err != nil { + return err + } + + case "assignment_explanation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentExplanation = &o + + case "data_counts": + if err := dec.Decode(&s.DataCounts); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "memory_usage": + if err := dec.Decode(&s.MemoryUsage); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "progress": + if err := dec.Decode(&s.Progress); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalytics returns a DataframeAnalytics. func NewDataframeAnalytics() *DataframeAnalytics { r := &DataframeAnalytics{} diff --git a/typedapi/types/dataframeanalyticsauthorization.go b/typedapi/types/dataframeanalyticsauthorization.go old mode 100755 new mode 100644 index 4f4a710fd1..c2f22602dc --- a/typedapi/types/dataframeanalyticsauthorization.go +++ b/typedapi/types/dataframeanalyticsauthorization.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeAnalyticsAuthorization type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Authorization.ts#L45-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Authorization.ts#L45-L57 type DataframeAnalyticsAuthorization struct { // ApiKey If an API key was used for the most recent update to the job, its name and // identifier are listed in the response. diff --git a/typedapi/types/dataframeanalyticsdestination.go b/typedapi/types/dataframeanalyticsdestination.go old mode 100755 new mode 100644 index 6ecebb2f61..7624cfbc71 --- a/typedapi/types/dataframeanalyticsdestination.go +++ b/typedapi/types/dataframeanalyticsdestination.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalyticsDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L77-L82 type DataframeAnalyticsDestination struct { // Index Defines the destination index to store the results of the data frame // analytics job. @@ -32,6 +40,36 @@ type DataframeAnalyticsDestination struct { ResultsField *string `json:"results_field,omitempty"` } +func (s *DataframeAnalyticsDestination) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "results_field": + if err := dec.Decode(&s.ResultsField); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalyticsDestination returns a DataframeAnalyticsDestination. func NewDataframeAnalyticsDestination() *DataframeAnalyticsDestination { r := &DataframeAnalyticsDestination{} diff --git a/typedapi/types/dataframeanalyticsfieldselection.go b/typedapi/types/dataframeanalyticsfieldselection.go old mode 100755 new mode 100644 index fcf98c1983..59fc4426b2 --- a/typedapi/types/dataframeanalyticsfieldselection.go +++ b/typedapi/types/dataframeanalyticsfieldselection.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalyticsFieldSelection type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L55-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L55-L68 type DataframeAnalyticsFieldSelection struct { // FeatureType The feature type of this field for the analysis. May be categorical or // numerical. @@ -39,6 +49,80 @@ type DataframeAnalyticsFieldSelection struct { Reason *string `json:"reason,omitempty"` } +func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureType = &o + + case "is_included": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsIncluded = value + case bool: + s.IsIncluded = v + } + + case "is_required": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsRequired = value + case bool: + s.IsRequired = v + } + + case "mapping_types": + if err := dec.Decode(&s.MappingTypes); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = &o + + } + } + return nil +} + // NewDataframeAnalyticsFieldSelection returns a DataframeAnalyticsFieldSelection. func NewDataframeAnalyticsFieldSelection() *DataframeAnalyticsFieldSelection { r := &DataframeAnalyticsFieldSelection{} diff --git a/typedapi/types/dataframeanalyticsmemoryestimation.go b/typedapi/types/dataframeanalyticsmemoryestimation.go old mode 100755 new mode 100644 index 453909054d..c2af460ddd --- a/typedapi/types/dataframeanalyticsmemoryestimation.go +++ b/typedapi/types/dataframeanalyticsmemoryestimation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeAnalyticsMemoryEstimation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L70-L75 type DataframeAnalyticsMemoryEstimation struct { // ExpectedMemoryWithDisk Estimated memory usage under the assumption that overflowing to disk is // allowed during data frame analytics. expected_memory_with_disk is usually diff --git a/typedapi/types/dataframeanalyticsrecord.go b/typedapi/types/dataframeanalyticsrecord.go old mode 100755 new mode 100644 index 311a921f96..7c0cc2150f --- a/typedapi/types/dataframeanalyticsrecord.go +++ b/typedapi/types/dataframeanalyticsrecord.go @@ -16,13 +16,21 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataFrameAnalyticsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_data_frame_analytics/types.ts#L22-L102 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_data_frame_analytics/types.ts#L22-L102 type DataFrameAnalyticsRecord struct { // AssignmentExplanation why the job is or is not assigned to a node AssignmentExplanation *string `json:"assignment_explanation,omitempty"` @@ -58,6 +66,133 @@ type DataFrameAnalyticsRecord struct { Version *string `json:"version,omitempty"` } +func (s *DataFrameAnalyticsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_explanation", "ae", "assignmentExplanation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentExplanation = &o + + case "create_time", "ct", "createTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreateTime = &o + + case "description", "d": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "dest_index", "di", "destIndex": + if err := dec.Decode(&s.DestIndex); err != nil { + return err + } + + case "failure_reason", "fr", "failureReason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FailureReason = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "model_memory_limit", "mml", "modelMemoryLimit": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelMemoryLimit = &o + + case "node.address", "na", "nodeAddress": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeAddress = &o + + case "node.ephemeral_id", "ne", "nodeEphemeralId": + if err := dec.Decode(&s.NodeEphemeralId); err != nil { + return err + } + + case "node.id", "ni", "nodeId": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "node.name", "nn", "nodeName": + if err := dec.Decode(&s.NodeName); err != nil { + return err + } + + case "progress", "p": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Progress = &o + + case "source_index", "si", "sourceIndex": + if err := dec.Decode(&s.SourceIndex); err != nil { + return err + } + + case "state", "s": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = &o + + case "type", "t": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewDataFrameAnalyticsRecord returns a 
DataFrameAnalyticsRecord. func NewDataFrameAnalyticsRecord() *DataFrameAnalyticsRecord { r := &DataFrameAnalyticsRecord{} diff --git a/typedapi/types/dataframeanalyticssource.go b/typedapi/types/dataframeanalyticssource.go old mode 100755 new mode 100644 index fc571da9be..f899edc730 --- a/typedapi/types/dataframeanalyticssource.go +++ b/typedapi/types/dataframeanalyticssource.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalyticsSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L39-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L39-L53 type DataframeAnalyticsSource struct { // Index Index or indices on which to perform the analysis. It can be a single index // or index pattern as well as an array of indices or patterns. NOTE: If your @@ -37,13 +45,64 @@ type DataframeAnalyticsSource struct { Query *Query `json:"query,omitempty"` // RuntimeMappings Definitions of runtime fields that will become part of the mapping of the // destination index. - RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings RuntimeFields `json:"runtime_mappings,omitempty"` // Source_ Specify `includes` and/or `excludes patterns to select which fields will be // present in the destination. Fields that are excluded cannot be included in // the analysis. Source_ *DataframeAnalysisAnalyzedFields `json:"_source,omitempty"` } +func (s *DataframeAnalyticsSource) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Index = append(s.Index, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { + return err + } + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "runtime_mappings": + if err := dec.Decode(&s.RuntimeMappings); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalyticsSource returns a DataframeAnalyticsSource. func NewDataframeAnalyticsSource() *DataframeAnalyticsSource { r := &DataframeAnalyticsSource{} diff --git a/typedapi/types/dataframeanalyticsstatscontainer.go b/typedapi/types/dataframeanalyticsstatscontainer.go old mode 100755 new mode 100644 index daea97aedc..8ecbbc2534 --- a/typedapi/types/dataframeanalyticsstatscontainer.go +++ b/typedapi/types/dataframeanalyticsstatscontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeAnalyticsStatsContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L370-L378 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L370-L378 type DataframeAnalyticsStatsContainer struct { // ClassificationStats An object containing information about the classification analysis job. ClassificationStats *DataframeAnalyticsStatsHyperparameters `json:"classification_stats,omitempty"` diff --git a/typedapi/types/dataframeanalyticsstatsdatacounts.go b/typedapi/types/dataframeanalyticsstatsdatacounts.go old mode 100755 new mode 100644 index f7da73e29f..6e764c34a9 --- a/typedapi/types/dataframeanalyticsstatsdatacounts.go +++ b/typedapi/types/dataframeanalyticsstatsdatacounts.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalyticsStatsDataCounts type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L361-L368 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L361-L368 type DataframeAnalyticsStatsDataCounts struct { // SkippedDocsCount The number of documents that are skipped during the analysis because they // contained values that are not supported by the analysis. For example, outlier @@ -37,6 +47,74 @@ type DataframeAnalyticsStatsDataCounts struct { TrainingDocsCount int `json:"training_docs_count"` } +func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "skipped_docs_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SkippedDocsCount = value + case float64: + f := int(v) + s.SkippedDocsCount = f + } + + case "test_docs_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TestDocsCount = value + case float64: + f := int(v) + s.TestDocsCount = f + } + + case "training_docs_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TrainingDocsCount = value + case float64: + f := int(v) + s.TrainingDocsCount = f + } + + } + } + return nil +} + // NewDataframeAnalyticsStatsDataCounts returns a DataframeAnalyticsStatsDataCounts. 
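The DataFrameAnalyticsRecord decoder a few hunks above accepts the cat API's short column names ("v", "ae", "nodeId", ...) as well as the long field names. A usage sketch under the same module-path assumption; the exact string stored for raw-message fields such as assignment_explanation is deliberately not asserted:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "v" and "ae" are the cat column aliases for version and assignment_explanation.
	raw := []byte(`{"id":"my-analytics-job","v":"8.8.0","ae":"job assigned to node-1"}`)

	var rec types.DataFrameAnalyticsRecord
	if err := json.Unmarshal(raw, &rec); err != nil {
		panic(err)
	}
	fmt.Println(*rec.Version, rec.AssignmentExplanation != nil) // 8.8.0 true
}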
func NewDataframeAnalyticsStatsDataCounts() *DataframeAnalyticsStatsDataCounts { r := &DataframeAnalyticsStatsDataCounts{} diff --git a/typedapi/types/dataframeanalyticsstatshyperparameters.go b/typedapi/types/dataframeanalyticsstatshyperparameters.go old mode 100755 new mode 100644 index f20968ab6a..ce9eb43718 --- a/typedapi/types/dataframeanalyticsstatshyperparameters.go +++ b/typedapi/types/dataframeanalyticsstatshyperparameters.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalyticsStatsHyperparameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L380-L387 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L380-L387 type DataframeAnalyticsStatsHyperparameters struct { Hyperparameters Hyperparameters `json:"hyperparameters"` // Iteration The number of iterations on the analysis. @@ -32,6 +42,62 @@ type DataframeAnalyticsStatsHyperparameters struct { ValidationLoss ValidationLoss `json:"validation_loss"` } +func (s *DataframeAnalyticsStatsHyperparameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hyperparameters": + if err := dec.Decode(&s.Hyperparameters); err != nil { + return err + } + + case "iteration": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Iteration = value + case float64: + f := int(v) + s.Iteration = f + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "timing_stats": + if err := dec.Decode(&s.TimingStats); err != nil { + return err + } + + case "validation_loss": + if err := dec.Decode(&s.ValidationLoss); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalyticsStatsHyperparameters returns a DataframeAnalyticsStatsHyperparameters. func NewDataframeAnalyticsStatsHyperparameters() *DataframeAnalyticsStatsHyperparameters { r := &DataframeAnalyticsStatsHyperparameters{} diff --git a/typedapi/types/dataframeanalyticsstatsmemoryusage.go b/typedapi/types/dataframeanalyticsstatsmemoryusage.go old mode 100755 new mode 100644 index b2e3a5106f..f1a5021d3d --- a/typedapi/types/dataframeanalyticsstatsmemoryusage.go +++ b/typedapi/types/dataframeanalyticsstatsmemoryusage.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalyticsStatsMemoryUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L350-L359 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L350-L359 type DataframeAnalyticsStatsMemoryUsage struct { // MemoryReestimateBytes This value is present when the status is hard_limit and it is a new estimate // of how much memory the job needs. @@ -35,6 +45,69 @@ type DataframeAnalyticsStatsMemoryUsage struct { Timestamp *int64 `json:"timestamp,omitempty"` } +func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "memory_reestimate_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemoryReestimateBytes = &value + case float64: + f := int64(v) + s.MemoryReestimateBytes = &f + } + + case "peak_usage_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PeakUsageBytes = value + case float64: + f := int64(v) + s.PeakUsageBytes = f + } + + case "status": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Status = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalyticsStatsMemoryUsage returns a DataframeAnalyticsStatsMemoryUsage. func NewDataframeAnalyticsStatsMemoryUsage() *DataframeAnalyticsStatsMemoryUsage { r := &DataframeAnalyticsStatsMemoryUsage{} diff --git a/typedapi/types/dataframeanalyticsstatsoutlierdetection.go b/typedapi/types/dataframeanalyticsstatsoutlierdetection.go old mode 100755 new mode 100644 index 497f1e1359..32a72c1d3b --- a/typedapi/types/dataframeanalyticsstatsoutlierdetection.go +++ b/typedapi/types/dataframeanalyticsstatsoutlierdetection.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeAnalyticsStatsOutlierDetection type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L389-L393 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L389-L393 type DataframeAnalyticsStatsOutlierDetection struct { Parameters OutlierDetectionParameters `json:"parameters"` Timestamp int64 `json:"timestamp"` TimingStats TimingStats `json:"timing_stats"` } +func (s *DataframeAnalyticsStatsOutlierDetection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "parameters": + if err := dec.Decode(&s.Parameters); err != nil { + return err + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "timing_stats": + if err := dec.Decode(&s.TimingStats); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalyticsStatsOutlierDetection returns a DataframeAnalyticsStatsOutlierDetection. func NewDataframeAnalyticsStatsOutlierDetection() *DataframeAnalyticsStatsOutlierDetection { r := &DataframeAnalyticsStatsOutlierDetection{} diff --git a/typedapi/types/dataframeanalyticsstatsprogress.go b/typedapi/types/dataframeanalyticsstatsprogress.go old mode 100755 new mode 100644 index 978b466d3f..6ff002ed71 --- a/typedapi/types/dataframeanalyticsstatsprogress.go +++ b/typedapi/types/dataframeanalyticsstatsprogress.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalyticsStatsProgress type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L343-L348 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L343-L348 type DataframeAnalyticsStatsProgress struct { // Phase Defines the phase of the data frame analytics job. Phase string `json:"phase"` @@ -31,6 +41,50 @@ type DataframeAnalyticsStatsProgress struct { ProgressPercent int `json:"progress_percent"` } +func (s *DataframeAnalyticsStatsProgress) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "phase": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Phase = o + + case "progress_percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ProgressPercent = value + case float64: + f := int(v) + s.ProgressPercent = f + } + + } + } + return nil +} + // NewDataframeAnalyticsStatsProgress returns a DataframeAnalyticsStatsProgress. 
func NewDataframeAnalyticsStatsProgress() *DataframeAnalyticsStatsProgress { r := &DataframeAnalyticsStatsProgress{} diff --git a/typedapi/types/dataframeanalyticssummary.go b/typedapi/types/dataframeanalyticssummary.go old mode 100755 new mode 100644 index f829c94b1f..9eb9b120d2 --- a/typedapi/types/dataframeanalyticssummary.go +++ b/typedapi/types/dataframeanalyticssummary.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeAnalyticsSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L306-L322 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L306-L322 type DataframeAnalyticsSummary struct { AllowLazyStart *bool `json:"allow_lazy_start,omitempty"` Analysis DataframeAnalysisContainer `json:"analysis"` @@ -41,6 +51,112 @@ type DataframeAnalyticsSummary struct { Version *string `json:"version,omitempty"` } +func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_lazy_start": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowLazyStart = &value + case bool: + s.AllowLazyStart = &v + } + + case "analysis": + if err := dec.Decode(&s.Analysis); err != nil { + return err + } + + case "analyzed_fields": + if err := dec.Decode(&s.AnalyzedFields); err != nil { + return err + } + + case "authorization": + if err := dec.Decode(&s.Authorization); err != nil { + return err + } + + case "create_time": + if err := dec.Decode(&s.CreateTime); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "dest": + if err := dec.Decode(&s.Dest); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "max_num_threads": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxNumThreads = &value + case float64: + f := int(v) + s.MaxNumThreads = &f + } + + case "model_memory_limit": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelMemoryLimit = &o + + case "source": + if err := dec.Decode(&s.Source); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeAnalyticsSummary returns a DataframeAnalyticsSummary. 
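The DataframeAnalyticsSummary decoder above composes the nested Dest and Source decoders from earlier in this diff, so a single index string under "source" ends up as a one-element slice, while string-encoded booleans and integers are coerced. A sketch under the same assumptions (module path and field names as shown in the hunks):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{
		"id": "my-analytics-job",
		"allow_lazy_start": "false",
		"max_num_threads": "2",
		"dest": {"index": "dest-index"},
		"source": {"index": "source-index"}
	}`)

	var sum types.DataframeAnalyticsSummary
	if err := json.Unmarshal(raw, &sum); err != nil {
		panic(err)
	}
	fmt.Println(*sum.AllowLazyStart, *sum.MaxNumThreads) // false 2
	fmt.Println(sum.Source.Index)                        // [source-index]
}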
func NewDataframeAnalyticsSummary() *DataframeAnalyticsSummary { r := &DataframeAnalyticsSummary{} diff --git a/typedapi/types/dataframeclassificationsummary.go b/typedapi/types/dataframeclassificationsummary.go old mode 100755 new mode 100644 index 1a0aad51b5..3858ead024 --- a/typedapi/types/dataframeclassificationsummary.go +++ b/typedapi/types/dataframeclassificationsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeClassificationSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L31-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L31-L37 type DataframeClassificationSummary struct { Accuracy *DataframeClassificationSummaryAccuracy `json:"accuracy,omitempty"` AucRoc *DataframeEvaluationSummaryAucRoc `json:"auc_roc,omitempty"` diff --git a/typedapi/types/dataframeclassificationsummaryaccuracy.go b/typedapi/types/dataframeclassificationsummaryaccuracy.go old mode 100755 new mode 100644 index 0418ff273b..81f3759449 --- a/typedapi/types/dataframeclassificationsummaryaccuracy.go +++ b/typedapi/types/dataframeclassificationsummaryaccuracy.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeClassificationSummaryAccuracy type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L70-L73 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L70-L73 type DataframeClassificationSummaryAccuracy struct { Classes []DataframeEvaluationClass `json:"classes"` OverallAccuracy Float64 `json:"overall_accuracy"` } +func (s *DataframeClassificationSummaryAccuracy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + case "overall_accuracy": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.OverallAccuracy = f + case float64: + f := Float64(v) + s.OverallAccuracy = f + } + + } + } + return nil +} + // NewDataframeClassificationSummaryAccuracy returns a DataframeClassificationSummaryAccuracy. 
func NewDataframeClassificationSummaryAccuracy() *DataframeClassificationSummaryAccuracy { r := &DataframeClassificationSummaryAccuracy{} diff --git a/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go b/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go old mode 100755 new mode 100644 index d656f7b05a..75e999aaa3 --- a/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go +++ b/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeClassificationSummaryMulticlassConfusionMatrix type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L79-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L79-L82 type DataframeClassificationSummaryMulticlassConfusionMatrix struct { ConfusionMatrix []ConfusionMatrixItem `json:"confusion_matrix"` OtherActualClassCount int `json:"other_actual_class_count"` } +func (s *DataframeClassificationSummaryMulticlassConfusionMatrix) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "confusion_matrix": + if err := dec.Decode(&s.ConfusionMatrix); err != nil { + return err + } + + case "other_actual_class_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OtherActualClassCount = value + case float64: + f := int(v) + s.OtherActualClassCount = f + } + + } + } + return nil +} + // NewDataframeClassificationSummaryMulticlassConfusionMatrix returns a DataframeClassificationSummaryMulticlassConfusionMatrix. func NewDataframeClassificationSummaryMulticlassConfusionMatrix() *DataframeClassificationSummaryMulticlassConfusionMatrix { r := &DataframeClassificationSummaryMulticlassConfusionMatrix{} diff --git a/typedapi/types/dataframeclassificationsummaryprecision.go b/typedapi/types/dataframeclassificationsummaryprecision.go old mode 100755 new mode 100644 index 4f9e5efa58..883275abff --- a/typedapi/types/dataframeclassificationsummaryprecision.go +++ b/typedapi/types/dataframeclassificationsummaryprecision.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeClassificationSummaryPrecision type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L60-L63 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L60-L63 type DataframeClassificationSummaryPrecision struct { AvgPrecision Float64 `json:"avg_precision"` Classes []DataframeEvaluationClass `json:"classes"` } +func (s *DataframeClassificationSummaryPrecision) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg_precision": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.AvgPrecision = f + case float64: + f := Float64(v) + s.AvgPrecision = f + } + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeClassificationSummaryPrecision returns a DataframeClassificationSummaryPrecision. func NewDataframeClassificationSummaryPrecision() *DataframeClassificationSummaryPrecision { r := &DataframeClassificationSummaryPrecision{} diff --git a/typedapi/types/dataframeclassificationsummaryrecall.go b/typedapi/types/dataframeclassificationsummaryrecall.go old mode 100755 new mode 100644 index 186d45a634..f2ac649e02 --- a/typedapi/types/dataframeclassificationsummaryrecall.go +++ b/typedapi/types/dataframeclassificationsummaryrecall.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeClassificationSummaryRecall type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L65-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L65-L68 type DataframeClassificationSummaryRecall struct { AvgRecall Float64 `json:"avg_recall"` Classes []DataframeEvaluationClass `json:"classes"` } +func (s *DataframeClassificationSummaryRecall) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg_recall": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.AvgRecall = f + case float64: + f := Float64(v) + s.AvgRecall = f + } + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeClassificationSummaryRecall returns a DataframeClassificationSummaryRecall. 
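The accuracy, precision and recall summaries follow the same shape: the top-level average is coerced from a string or a number, and each element of classes is handled by the DataframeEvaluationClass decoder that follows. Sketch under the same module-path assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// avg_recall arrives as a string, the per-class value as a number;
	// both end up as Float64.
	raw := []byte(`{"avg_recall":"0.9","classes":[{"class_name":"cat","value":0.97}]}`)

	var recall types.DataframeClassificationSummaryRecall
	if err := json.Unmarshal(raw, &recall); err != nil {
		panic(err)
	}
	fmt.Println(recall.AvgRecall, recall.Classes[0].Value) // 0.9 0.97
}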
func NewDataframeClassificationSummaryRecall() *DataframeClassificationSummaryRecall { r := &DataframeClassificationSummaryRecall{} diff --git a/typedapi/types/dataframeevaluationclass.go b/typedapi/types/dataframeevaluationclass.go old mode 100755 new mode 100644 index 0b104b770b..b5841a3b20 --- a/typedapi/types/dataframeevaluationclass.go +++ b/typedapi/types/dataframeevaluationclass.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationClass type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L75-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L75-L77 type DataframeEvaluationClass struct { ClassName string `json:"class_name"` Value Float64 `json:"value"` } +func (s *DataframeEvaluationClass) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + if err := dec.Decode(&s.ClassName); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Value = f + case float64: + f := Float64(v) + s.Value = f + } + + } + } + return nil +} + // NewDataframeEvaluationClass returns a DataframeEvaluationClass. func NewDataframeEvaluationClass() *DataframeEvaluationClass { r := &DataframeEvaluationClass{} diff --git a/typedapi/types/dataframeevaluationclassification.go b/typedapi/types/dataframeevaluationclassification.go old mode 100755 new mode 100644 index 0a64f4b138..96ab62daa2 --- a/typedapi/types/dataframeevaluationclassification.go +++ b/typedapi/types/dataframeevaluationclassification.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeEvaluationClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L35-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L35-L44 type DataframeEvaluationClassification struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. 
If the data type is integer, the value has @@ -39,6 +47,46 @@ type DataframeEvaluationClassification struct { TopClassesField *string `json:"top_classes_field,omitempty"` } +func (s *DataframeEvaluationClassification) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actual_field": + if err := dec.Decode(&s.ActualField); err != nil { + return err + } + + case "metrics": + if err := dec.Decode(&s.Metrics); err != nil { + return err + } + + case "predicted_field": + if err := dec.Decode(&s.PredictedField); err != nil { + return err + } + + case "top_classes_field": + if err := dec.Decode(&s.TopClassesField); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeEvaluationClassification returns a DataframeEvaluationClassification. func NewDataframeEvaluationClassification() *DataframeEvaluationClassification { r := &DataframeEvaluationClassification{} diff --git a/typedapi/types/dataframeevaluationclassificationmetrics.go b/typedapi/types/dataframeevaluationclassificationmetrics.go old mode 100755 new mode 100644 index a965376257..d70ca94b6d --- a/typedapi/types/dataframeevaluationclassificationmetrics.go +++ b/typedapi/types/dataframeevaluationclassificationmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationClassificationMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L73-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L73-L78 type DataframeEvaluationClassificationMetrics struct { // Accuracy Accuracy of predictions (per-class and overall). Accuracy map[string]json.RawMessage `json:"accuracy,omitempty"` diff --git a/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go b/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go old mode 100755 new mode 100644 index d6e16ba1ab..f53b9c220e --- a/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go +++ b/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationClassificationMetricsAucRoc type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L85-L90 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L85-L90 type DataframeEvaluationClassificationMetricsAucRoc struct { // ClassName Name of the only class that is treated as positive during AUC ROC // calculation. Other classes are treated as negative ("one-vs-all" strategy). @@ -34,6 +44,45 @@ type DataframeEvaluationClassificationMetricsAucRoc struct { IncludeCurve *bool `json:"include_curve,omitempty"` } +func (s *DataframeEvaluationClassificationMetricsAucRoc) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + if err := dec.Decode(&s.ClassName); err != nil { + return err + } + + case "include_curve": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeCurve = &value + case bool: + s.IncludeCurve = &v + } + + } + } + return nil +} + // NewDataframeEvaluationClassificationMetricsAucRoc returns a DataframeEvaluationClassificationMetricsAucRoc. func NewDataframeEvaluationClassificationMetricsAucRoc() *DataframeEvaluationClassificationMetricsAucRoc { r := &DataframeEvaluationClassificationMetricsAucRoc{} diff --git a/typedapi/types/dataframeevaluationcontainer.go b/typedapi/types/dataframeevaluationcontainer.go old mode 100755 new mode 100644 index 5580b80b0f..3d65943fcb --- a/typedapi/types/dataframeevaluationcontainer.go +++ b/typedapi/types/dataframeevaluationcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeEvaluationContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L25-L33 type DataframeEvaluationContainer struct { // Classification Classification evaluation evaluates the results of a classification analysis // which outputs a prediction that identifies to which of the classes each diff --git a/typedapi/types/dataframeevaluationmetrics.go b/typedapi/types/dataframeevaluationmetrics.go old mode 100755 new mode 100644 index e70c00eadb..77ff8bf860 --- a/typedapi/types/dataframeevaluationmetrics.go +++ b/typedapi/types/dataframeevaluationmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L64-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L64-L71 type DataframeEvaluationMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class diff --git a/typedapi/types/dataframeevaluationoutlierdetection.go b/typedapi/types/dataframeevaluationoutlierdetection.go old mode 100755 new mode 100644 index 7c00d80866..ef6e6831b6 --- a/typedapi/types/dataframeevaluationoutlierdetection.go +++ b/typedapi/types/dataframeevaluationoutlierdetection.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeEvaluationOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L46-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L46-L53 type DataframeEvaluationOutlierDetection struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has @@ -36,6 +44,41 @@ type DataframeEvaluationOutlierDetection struct { PredictedProbabilityField string `json:"predicted_probability_field"` } +func (s *DataframeEvaluationOutlierDetection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actual_field": + if err := dec.Decode(&s.ActualField); err != nil { + return err + } + + case "metrics": + if err := dec.Decode(&s.Metrics); err != nil { + return err + } + + case "predicted_probability_field": + if err := dec.Decode(&s.PredictedProbabilityField); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeEvaluationOutlierDetection returns a DataframeEvaluationOutlierDetection. func NewDataframeEvaluationOutlierDetection() *DataframeEvaluationOutlierDetection { r := &DataframeEvaluationOutlierDetection{} diff --git a/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go b/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go old mode 100755 new mode 100644 index 5f68b68222..1ea9007019 --- a/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go +++ b/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationOutlierDetectionMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L80-L83 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L80-L83 type DataframeEvaluationOutlierDetectionMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class diff --git a/typedapi/types/dataframeevaluationregression.go b/typedapi/types/dataframeevaluationregression.go old mode 100755 new mode 100644 index 3cde676e05..b726984f00 --- a/typedapi/types/dataframeevaluationregression.go +++ b/typedapi/types/dataframeevaluationregression.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeEvaluationRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L55-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L55-L62 type DataframeEvaluationRegression struct { // ActualField The field of the index which contains the ground truth. The data type of this // field must be numerical. @@ -36,6 +44,41 @@ type DataframeEvaluationRegression struct { PredictedField string `json:"predicted_field"` } +func (s *DataframeEvaluationRegression) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actual_field": + if err := dec.Decode(&s.ActualField); err != nil { + return err + } + + case "metrics": + if err := dec.Decode(&s.Metrics); err != nil { + return err + } + + case "predicted_field": + if err := dec.Decode(&s.PredictedField); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeEvaluationRegression returns a DataframeEvaluationRegression. func NewDataframeEvaluationRegression() *DataframeEvaluationRegression { r := &DataframeEvaluationRegression{} diff --git a/typedapi/types/dataframeevaluationregressionmetrics.go b/typedapi/types/dataframeevaluationregressionmetrics.go old mode 100755 new mode 100644 index ce417d4e08..e729e339b5 --- a/typedapi/types/dataframeevaluationregressionmetrics.go +++ b/typedapi/types/dataframeevaluationregressionmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationRegressionMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L92-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L92-L110 type DataframeEvaluationRegressionMetrics struct { // Huber Pseudo Huber loss function. Huber *DataframeEvaluationRegressionMetricsHuber `json:"huber,omitempty"` diff --git a/typedapi/types/dataframeevaluationregressionmetricshuber.go b/typedapi/types/dataframeevaluationregressionmetricshuber.go old mode 100755 new mode 100644 index 9872607c6a..c41a6ff4d8 --- a/typedapi/types/dataframeevaluationregressionmetricshuber.go +++ b/typedapi/types/dataframeevaluationregressionmetricshuber.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationRegressionMetricsHuber type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L117-L120 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L117-L120 type DataframeEvaluationRegressionMetricsHuber struct { // Delta Approximates 1/2 (prediction - actual)2 for values much less than delta and // approximates a straight line with slope delta for values much larger than @@ -30,6 +40,42 @@ type DataframeEvaluationRegressionMetricsHuber struct { Delta *Float64 `json:"delta,omitempty"` } +func (s *DataframeEvaluationRegressionMetricsHuber) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "delta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Delta = &f + case float64: + f := Float64(v) + s.Delta = &f + } + + } + } + return nil +} + // NewDataframeEvaluationRegressionMetricsHuber returns a DataframeEvaluationRegressionMetricsHuber. func NewDataframeEvaluationRegressionMetricsHuber() *DataframeEvaluationRegressionMetricsHuber { r := &DataframeEvaluationRegressionMetricsHuber{} diff --git a/typedapi/types/dataframeevaluationregressionmetricsmsle.go b/typedapi/types/dataframeevaluationregressionmetricsmsle.go old mode 100755 new mode 100644 index dd1a2c0f18..27f84e5d89 --- a/typedapi/types/dataframeevaluationregressionmetricsmsle.go +++ b/typedapi/types/dataframeevaluationregressionmetricsmsle.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationRegressionMetricsMsle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeEvaluation.ts#L112-L115 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeEvaluation.ts#L112-L115 type DataframeEvaluationRegressionMetricsMsle struct { // Offset Defines the transition point at which you switch from minimizing quadratic // error to minimizing quadratic log error. Defaults to 1. Offset *Float64 `json:"offset,omitempty"` } +func (s *DataframeEvaluationRegressionMetricsMsle) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "offset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Offset = &f + case float64: + f := Float64(v) + s.Offset = &f + } + + } + } + return nil +} + // NewDataframeEvaluationRegressionMetricsMsle returns a DataframeEvaluationRegressionMetricsMsle. func NewDataframeEvaluationRegressionMetricsMsle() *DataframeEvaluationRegressionMetricsMsle { r := &DataframeEvaluationRegressionMetricsMsle{} diff --git a/typedapi/types/dataframeevaluationsummaryaucroc.go b/typedapi/types/dataframeevaluationsummaryaucroc.go old mode 100755 new mode 100644 index d3fb12592d..c68d5e6876 --- a/typedapi/types/dataframeevaluationsummaryaucroc.go +++ b/typedapi/types/dataframeevaluationsummaryaucroc.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationSummaryAucRoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L50-L52 type DataframeEvaluationSummaryAucRoc struct { Curve []DataframeEvaluationSummaryAucRocCurveItem `json:"curve,omitempty"` Value Float64 `json:"value"` } +func (s *DataframeEvaluationSummaryAucRoc) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "curve": + if err := dec.Decode(&s.Curve); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Value = f + case float64: + f := Float64(v) + s.Value = f + } + + } + } + return nil +} + // NewDataframeEvaluationSummaryAucRoc returns a DataframeEvaluationSummaryAucRoc. 
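The UnmarshalJSON above accepts the "value" field either as a JSON number or as a numeric string, falling back to strconv.ParseFloat in the string case. A minimal caller-side sketch of that tolerance, assuming the github.com/elastic/go-elasticsearch/v8/typedapi/types import path used throughout this patch and a fabricated response fragment:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "value" arrives as a string here; the generated decoder parses it anyway.
	raw := []byte(`{"value":"0.9314","curve":[{"fpr":0.1,"tpr":0.8,"threshold":0.5}]}`)

	var auc types.DataframeEvaluationSummaryAucRoc
	if err := json.Unmarshal(raw, &auc); err != nil {
		panic(err)
	}
	fmt.Println(auc.Value, len(auc.Curve)) // 0.9314 1
}
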
func NewDataframeEvaluationSummaryAucRoc() *DataframeEvaluationSummaryAucRoc { r := &DataframeEvaluationSummaryAucRoc{} diff --git a/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go b/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go old mode 100755 new mode 100644 index 763b903c45..3d8296b1f1 --- a/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go +++ b/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go @@ -16,19 +16,97 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationSummaryAucRocCurveItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L54-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L54-L58 type DataframeEvaluationSummaryAucRocCurveItem struct { Fpr Float64 `json:"fpr"` Threshold Float64 `json:"threshold"` Tpr Float64 `json:"tpr"` } +func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fpr": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Fpr = f + case float64: + f := Float64(v) + s.Fpr = f + } + + case "threshold": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Threshold = f + case float64: + f := Float64(v) + s.Threshold = f + } + + case "tpr": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Tpr = f + case float64: + f := Float64(v) + s.Tpr = f + } + + } + } + return nil +} + // NewDataframeEvaluationSummaryAucRocCurveItem returns a DataframeEvaluationSummaryAucRocCurveItem. func NewDataframeEvaluationSummaryAucRocCurveItem() *DataframeEvaluationSummaryAucRocCurveItem { r := &DataframeEvaluationSummaryAucRocCurveItem{} diff --git a/typedapi/types/dataframeevaluationvalue.go b/typedapi/types/dataframeevaluationvalue.go old mode 100755 new mode 100644 index db1b543880..ade3b0fac6 --- a/typedapi/types/dataframeevaluationvalue.go +++ b/typedapi/types/dataframeevaluationvalue.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframeEvaluationValue type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L46-L48 type DataframeEvaluationValue struct { Value Float64 `json:"value"` } +func (s *DataframeEvaluationValue) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Value = f + case float64: + f := Float64(v) + s.Value = f + } + + } + } + return nil +} + // NewDataframeEvaluationValue returns a DataframeEvaluationValue. func NewDataframeEvaluationValue() *DataframeEvaluationValue { r := &DataframeEvaluationValue{} diff --git a/typedapi/types/dataframeoutlierdetectionsummary.go b/typedapi/types/dataframeoutlierdetectionsummary.go old mode 100755 new mode 100644 index 6965d4cf01..dba0b6494d --- a/typedapi/types/dataframeoutlierdetectionsummary.go +++ b/typedapi/types/dataframeoutlierdetectionsummary.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataframeOutlierDetectionSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L24-L29 type DataframeOutlierDetectionSummary struct { AucRoc *DataframeEvaluationSummaryAucRoc `json:"auc_roc,omitempty"` ConfusionMatrix map[string]ConfusionMatrixThreshold `json:"confusion_matrix,omitempty"` @@ -30,6 +38,55 @@ type DataframeOutlierDetectionSummary struct { Recall map[string]Float64 `json:"recall,omitempty"` } +func (s *DataframeOutlierDetectionSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auc_roc": + if err := dec.Decode(&s.AucRoc); err != nil { + return err + } + + case "confusion_matrix": + if s.ConfusionMatrix == nil { + s.ConfusionMatrix = make(map[string]ConfusionMatrixThreshold, 0) + } + if err := dec.Decode(&s.ConfusionMatrix); err != nil { + return err + } + + case "precision": + if s.Precision == nil { + s.Precision = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.Precision); err != nil { + return err + } + + case "recall": + if s.Recall == nil { + s.Recall = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.Recall); err != nil { + return err + } + + } + } + return nil +} + // NewDataframeOutlierDetectionSummary returns a DataframeOutlierDetectionSummary. 
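The same lenient-scalar pattern recurs in every decoder added by this patch: read the raw value into an interface{}, then strconv-parse it when the server has serialized a number or boolean as a string. A distilled, standalone sketch of the idea, using a hypothetical docCount type rather than any generated one:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type docCount struct {
	Count int64
}

func (d *docCount) UnmarshalJSON(data []byte) error {
	var raw struct {
		Count interface{} `json:"count"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw.Count.(type) {
	case string:
		// The server sent "42"; parse it into the numeric field.
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return err
		}
		d.Count = n
	case float64: // encoding/json decodes plain JSON numbers into float64
		d.Count = int64(v)
	}
	return nil
}

func main() {
	var a, b docCount
	_ = json.Unmarshal([]byte(`{"count": 42}`), &a)
	_ = json.Unmarshal([]byte(`{"count": "42"}`), &b)
	fmt.Println(a.Count, b.Count) // 42 42
}
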
func NewDataframeOutlierDetectionSummary() *DataframeOutlierDetectionSummary { r := &DataframeOutlierDetectionSummary{ diff --git a/typedapi/types/dataframepreviewconfig.go b/typedapi/types/dataframepreviewconfig.go old mode 100755 new mode 100644 index 2ad6d89150..c54d86ed5d --- a/typedapi/types/dataframepreviewconfig.go +++ b/typedapi/types/dataframepreviewconfig.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataframePreviewConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/preview_data_frame_analytics/types.ts#L27-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/preview_data_frame_analytics/types.ts#L27-L33 type DataframePreviewConfig struct { Analysis DataframeAnalysisContainer `json:"analysis"` AnalyzedFields *DataframeAnalysisAnalyzedFields `json:"analyzed_fields,omitempty"` @@ -31,6 +41,65 @@ type DataframePreviewConfig struct { Source DataframeAnalyticsSource `json:"source"` } +func (s *DataframePreviewConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analysis": + if err := dec.Decode(&s.Analysis); err != nil { + return err + } + + case "analyzed_fields": + if err := dec.Decode(&s.AnalyzedFields); err != nil { + return err + } + + case "max_num_threads": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxNumThreads = &value + case float64: + f := int(v) + s.MaxNumThreads = &f + } + + case "model_memory_limit": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelMemoryLimit = &o + + case "source": + if err := dec.Decode(&s.Source); err != nil { + return err + } + + } + } + return nil +} + // NewDataframePreviewConfig returns a DataframePreviewConfig. func NewDataframePreviewConfig() *DataframePreviewConfig { r := &DataframePreviewConfig{} diff --git a/typedapi/types/dataframeregressionsummary.go b/typedapi/types/dataframeregressionsummary.go old mode 100755 new mode 100644 index 0f8df4e7cc..1bdaa6d4ea --- a/typedapi/types/dataframeregressionsummary.go +++ b/typedapi/types/dataframeregressionsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataframeRegressionSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/evaluate_data_frame/types.ts#L39-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/evaluate_data_frame/types.ts#L39-L44 type DataframeRegressionSummary struct { Huber *DataframeEvaluationValue `json:"huber,omitempty"` Mse *DataframeEvaluationValue `json:"mse,omitempty"` diff --git a/typedapi/types/datapathstats.go b/typedapi/types/datapathstats.go old mode 100755 new mode 100644 index 52ef843f6e..ed9799a729 --- a/typedapi/types/datapathstats.go +++ b/typedapi/types/datapathstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataPathStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L229-L246 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L229-L246 type DataPathStats struct { Available *string `json:"available,omitempty"` AvailableInBytes *int64 `json:"available_in_bytes,omitempty"` @@ -42,6 +52,203 @@ type DataPathStats struct { Type *string `json:"type,omitempty"` } +func (s *DataPathStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Available = &o + + case "available_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvailableInBytes = &value + case float64: + f := int64(v) + s.AvailableInBytes = &f + } + + case "disk_queue": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DiskQueue = &o + + case "disk_read_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DiskReadSize = &o + + case "disk_read_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DiskReadSizeInBytes = &value + case float64: + f := int64(v) + s.DiskReadSizeInBytes = &f + } + + case "disk_reads": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DiskReads = &value + case float64: + f := int64(v) + s.DiskReads = &f + } + + case "disk_write_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DiskWriteSize = &o + + case "disk_write_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + 
s.DiskWriteSizeInBytes = &value + case float64: + f := int64(v) + s.DiskWriteSizeInBytes = &f + } + + case "disk_writes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DiskWrites = &value + case float64: + f := int64(v) + s.DiskWrites = &f + } + + case "free": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Free = &o + + case "free_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeInBytes = &value + case float64: + f := int64(v) + s.FreeInBytes = &f + } + + case "mount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Mount = &o + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = &o + + case "total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Total = &o + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = &value + case float64: + f := int64(v) + s.TotalInBytes = &f + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + } + } + return nil +} + // NewDataPathStats returns a DataPathStats. func NewDataPathStats() *DataPathStats { r := &DataPathStats{} diff --git a/typedapi/types/datastream.go b/typedapi/types/datastream.go old mode 100755 new mode 100644 index 260ce6d427..adca37159b --- a/typedapi/types/datastream.go +++ b/typedapi/types/datastream.go @@ -16,32 +16,165 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // DataStream type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/DataStream.ts#L31-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/DataStream.ts#L31-L46 type DataStream struct { - AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` - Generation int `json:"generation"` - Hidden bool `json:"hidden"` - IlmPolicy *string `json:"ilm_policy,omitempty"` - Indices []DataStreamIndex `json:"indices"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` - Name string `json:"name"` - Replicated *bool `json:"replicated,omitempty"` - Status healthstatus.HealthStatus `json:"status"` - System *bool `json:"system,omitempty"` - Template string `json:"template"` - TimestampField DataStreamTimestampField `json:"timestamp_field"` + AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` + Generation int `json:"generation"` + Hidden bool `json:"hidden"` + IlmPolicy *string `json:"ilm_policy,omitempty"` + Indices []DataStreamIndex `json:"indices"` + Meta_ Metadata `json:"_meta,omitempty"` + Name string `json:"name"` + Replicated *bool `json:"replicated,omitempty"` + Status healthstatus.HealthStatus `json:"status"` + System *bool `json:"system,omitempty"` + Template string `json:"template"` + TimestampField DataStreamTimestampField `json:"timestamp_field"` +} + +func (s *DataStream) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_custom_routing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowCustomRouting = &value + case bool: + s.AllowCustomRouting = &v + } + + case "generation": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Generation = value + case float64: + f := int(v) + s.Generation = f + } + + case "hidden": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Hidden = value + case bool: + s.Hidden = v + } + + case "ilm_policy": + if err := dec.Decode(&s.IlmPolicy); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "replicated": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Replicated = &value + case bool: + s.Replicated = &v + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "system": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.System = &value + case bool: + s.System = &v + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + case "timestamp_field": + if err := dec.Decode(&s.TimestampField); err != nil { + return err + } + + } + } + return nil } // NewDataStream returns a 
DataStream. diff --git a/typedapi/types/datastreamindex.go b/typedapi/types/datastreamindex.go old mode 100755 new mode 100644 index 79cd1eee4c..2ee24c14f7 --- a/typedapi/types/datastreamindex.go +++ b/typedapi/types/datastreamindex.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataStreamIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/DataStream.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/DataStream.ts#L52-L55 type DataStreamIndex struct { IndexName string `json:"index_name"` IndexUuid string `json:"index_uuid"` } +func (s *DataStreamIndex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index_name": + if err := dec.Decode(&s.IndexName); err != nil { + return err + } + + case "index_uuid": + if err := dec.Decode(&s.IndexUuid); err != nil { + return err + } + + } + } + return nil +} + // NewDataStreamIndex returns a DataStreamIndex. func NewDataStreamIndex() *DataStreamIndex { r := &DataStreamIndex{} diff --git a/typedapi/types/datastreamnames.go b/typedapi/types/datastreamnames.go old mode 100755 new mode 100644 index 47107079e1..5084206419 --- a/typedapi/types/datastreamnames.go +++ b/typedapi/types/datastreamnames.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DataStreamNames type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L86-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L86-L86 type DataStreamNames []string diff --git a/typedapi/types/datastreams.go b/typedapi/types/datastreams.go old mode 100755 new mode 100644 index c948823c15..fd227ae12e --- a/typedapi/types/datastreams.go +++ b/typedapi/types/datastreams.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataStreams type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L81-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L81-L84 type DataStreams struct { Available bool `json:"available"` DataStreams int64 `json:"data_streams"` @@ -30,6 +40,84 @@ type DataStreams struct { IndicesCount int64 `json:"indices_count"` } +func (s *DataStreams) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "data_streams": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DataStreams = value + case float64: + f := int64(v) + s.DataStreams = f + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "indices_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndicesCount = value + case float64: + f := int64(v) + s.IndicesCount = f + } + + } + } + return nil +} + // NewDataStreams returns a DataStreams. func NewDataStreams() *DataStreams { r := &DataStreams{} diff --git a/typedapi/types/datastreamsstatsitem.go b/typedapi/types/datastreamsstatsitem.go old mode 100755 new mode 100644 index 6471624968..40612ac8a8 --- a/typedapi/types/datastreamsstatsitem.go +++ b/typedapi/types/datastreamsstatsitem.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataStreamsStatsItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L36-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L36-L42 type DataStreamsStatsItem struct { BackingIndices int `json:"backing_indices"` DataStream string `json:"data_stream"` @@ -31,6 +41,73 @@ type DataStreamsStatsItem struct { StoreSizeBytes int `json:"store_size_bytes"` } +func (s *DataStreamsStatsItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "backing_indices": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.BackingIndices = value + case float64: + f := int(v) + s.BackingIndices = f + } + + case "data_stream": + if err := dec.Decode(&s.DataStream); err != nil { + return err + } + + case "maximum_timestamp": + if err := dec.Decode(&s.MaximumTimestamp); err != nil { + return err + } + + case "store_size": + if err := dec.Decode(&s.StoreSize); err != nil { + return err + } + + case "store_size_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.StoreSizeBytes = value + case float64: + f := int(v) + s.StoreSizeBytes = f + } + + } + } + return nil +} + // NewDataStreamsStatsItem returns a DataStreamsStatsItem. func NewDataStreamsStatsItem() *DataStreamsStatsItem { r := &DataStreamsStatsItem{} diff --git a/typedapi/types/datastreamtimestamp.go b/typedapi/types/datastreamtimestamp.go old mode 100755 new mode 100644 index 9f78fa63a8..0a48631925 --- a/typedapi/types/datastreamtimestamp.go +++ b/typedapi/types/datastreamtimestamp.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataStreamTimestamp type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/TypeMapping.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/TypeMapping.ts#L55-L57 type DataStreamTimestamp struct { Enabled bool `json:"enabled"` } +func (s *DataStreamTimestamp) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewDataStreamTimestamp returns a DataStreamTimestamp. 
func NewDataStreamTimestamp() *DataStreamTimestamp { r := &DataStreamTimestamp{} diff --git a/typedapi/types/datastreamtimestampfield.go b/typedapi/types/datastreamtimestampfield.go old mode 100755 new mode 100644 index 51725e27b7..d3a0f75949 --- a/typedapi/types/datastreamtimestampfield.go +++ b/typedapi/types/datastreamtimestampfield.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DataStreamTimestampField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/DataStream.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/DataStream.ts#L48-L50 type DataStreamTimestampField struct { Name string `json:"name"` } +func (s *DataStreamTimestampField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewDataStreamTimestampField returns a DataStreamTimestampField. func NewDataStreamTimestampField() *DataStreamTimestampField { r := &DataStreamTimestampField{} diff --git a/typedapi/types/datastreamvisibility.go b/typedapi/types/datastreamvisibility.go old mode 100755 new mode 100644 index 57f9498515..98d01d91e4 --- a/typedapi/types/datastreamvisibility.go +++ b/typedapi/types/datastreamvisibility.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataStreamVisibility type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/DataStream.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/DataStream.ts#L57-L59 type DataStreamVisibility struct { Hidden *bool `json:"hidden,omitempty"` } +func (s *DataStreamVisibility) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hidden": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Hidden = &value + case bool: + s.Hidden = &v + } + + } + } + return nil +} + // NewDataStreamVisibility returns a DataStreamVisibility. 
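Optional fields in these generated structs remain pointers (Hidden *bool above), so request-side code typically sets them through a local variable and lets omitempty do the rest. A short sketch using the NewDataStreamVisibility constructor shown above; the usage itself is illustrative, not taken from the patch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	hidden := true

	// NewDataStreamVisibility returns a zero-value struct; optional fields are set via pointers.
	v := types.NewDataStreamVisibility()
	v.Hidden = &hidden

	out, _ := json.Marshal(v)
	fmt.Println(string(out)) // {"hidden":true}
}
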
func NewDataStreamVisibility() *DataStreamVisibility { r := &DataStreamVisibility{} diff --git a/typedapi/types/datatierphasestatistics.go b/typedapi/types/datatierphasestatistics.go old mode 100755 new mode 100644 index 17fd75dca3..4d10a0b3b0 --- a/typedapi/types/datatierphasestatistics.go +++ b/typedapi/types/datatierphasestatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataTierPhaseStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L86-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L86-L97 type DataTierPhaseStatistics struct { DocCount int64 `json:"doc_count"` IndexCount int64 `json:"index_count"` @@ -36,6 +46,176 @@ type DataTierPhaseStatistics struct { TotalSizeBytes int64 `json:"total_size_bytes"` } +func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f + } + + case "index_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexCount = value + case float64: + f := int64(v) + s.IndexCount = f + } + + case "node_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NodeCount = value + case float64: + f := int64(v) + s.NodeCount = f + } + + case "primary_shard_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryShardCount = value + case float64: + f := int64(v) + s.PrimaryShardCount = f + } + + case "primary_shard_size_avg_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryShardSizeAvgBytes = value + case float64: + f := int64(v) + s.PrimaryShardSizeAvgBytes = f + } + + case "primary_shard_size_mad_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryShardSizeMadBytes = value + case float64: + f := int64(v) + s.PrimaryShardSizeMadBytes = f + } + + case "primary_shard_size_median_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryShardSizeMedianBytes = value + case float64: + f := int64(v) + s.PrimaryShardSizeMedianBytes = f + } + + 
case "primary_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimarySizeBytes = value + case float64: + f := int64(v) + s.PrimarySizeBytes = f + } + + case "total_shard_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalShardCount = value + case float64: + f := int64(v) + s.TotalShardCount = f + } + + case "total_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSizeBytes = value + case float64: + f := int64(v) + s.TotalSizeBytes = f + } + + } + } + return nil +} + // NewDataTierPhaseStatistics returns a DataTierPhaseStatistics. func NewDataTierPhaseStatistics() *DataTierPhaseStatistics { r := &DataTierPhaseStatistics{} diff --git a/typedapi/types/datatiers.go b/typedapi/types/datatiers.go old mode 100755 new mode 100644 index 0b1dc143e6..b41357c918 --- a/typedapi/types/datatiers.go +++ b/typedapi/types/datatiers.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DataTiers type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L333-L340 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L333-L340 type DataTiers struct { Available bool `json:"available"` DataCold DataTierPhaseStatistics `json:"data_cold"` @@ -33,6 +43,79 @@ type DataTiers struct { Enabled bool `json:"enabled"` } +func (s *DataTiers) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "data_cold": + if err := dec.Decode(&s.DataCold); err != nil { + return err + } + + case "data_content": + if err := dec.Decode(&s.DataContent); err != nil { + return err + } + + case "data_frozen": + if err := dec.Decode(&s.DataFrozen); err != nil { + return err + } + + case "data_hot": + if err := dec.Decode(&s.DataHot); err != nil { + return err + } + + case "data_warm": + if err := dec.Decode(&s.DataWarm); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewDataTiers returns a DataTiers. 
func NewDataTiers() *DataTiers { r := &DataTiers{} diff --git a/typedapi/types/datedecayfunction.go b/typedapi/types/datedecayfunction.go old mode 100755 new mode 100644 index 6a783b4973..83e4d4b0c0 --- a/typedapi/types/datedecayfunction.go +++ b/typedapi/types/datedecayfunction.go @@ -16,25 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // DateDecayFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L92-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L92-L94 type DateDecayFunction struct { - DateDecayFunction map[string]DecayPlacementDateMathDuration `json:"-"` + DateDecayFunction map[string]DecayPlacementDateMathDuration `json:"DateDecayFunction,omitempty"` MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` } +func (s *DateDecayFunction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "DateDecayFunction": + if s.DateDecayFunction == nil { + s.DateDecayFunction = make(map[string]DecayPlacementDateMathDuration, 0) + } + if err := dec.Decode(&s.DateDecayFunction); err != nil { + return err + } + + case "multi_value_mode": + if err := dec.Decode(&s.MultiValueMode); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s DateDecayFunction) MarshalJSON() ([]byte, error) { type opt DateDecayFunction @@ -54,6 +94,7 @@ func (s DateDecayFunction) MarshalJSON() ([]byte, error) { for key, value := range s.DateDecayFunction { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "DateDecayFunction") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/datedistancefeaturequery.go b/typedapi/types/datedistancefeaturequery.go old mode 100755 new mode 100644 index 6da8cbc6e0..7be6c39bf0 --- a/typedapi/types/datedistancefeaturequery.go +++ b/typedapi/types/datedistancefeaturequery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DateDistanceFeatureQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L51-L54 type DateDistanceFeatureQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` @@ -31,6 +41,65 @@ type DateDistanceFeatureQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *DateDistanceFeatureQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "pivot": + if err := dec.Decode(&s.Pivot); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewDateDistanceFeatureQuery returns a DateDistanceFeatureQuery. func NewDateDistanceFeatureQuery() *DateDistanceFeatureQuery { r := &DateDistanceFeatureQuery{} diff --git a/typedapi/types/datehistogramaggregate.go b/typedapi/types/datehistogramaggregate.go old mode 100755 new mode 100644 index c3f7a78957..7adef4d6b8 --- a/typedapi/types/datehistogramaggregate.go +++ b/typedapi/types/datehistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // DateHistogramAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L347-L348 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L348-L349 type DateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *DateHistogramAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *DateHistogramAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]DateHistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []DateHistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/datehistogramaggregation.go b/typedapi/types/datehistogramaggregation.go old mode 100755 new mode 100644 index bc6370a7d0..cffb01f6a9 --- a/typedapi/types/datehistogramaggregation.go +++ b/typedapi/types/datehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DateHistogramAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L93-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L93-L110 type DateHistogramAggregation struct { CalendarInterval *calendarinterval.CalendarInterval `json:"calendar_interval,omitempty"` ExtendedBounds *ExtendedBoundsFieldDateMath `json:"extended_bounds,omitempty"` @@ -43,7 +45,7 @@ type DateHistogramAggregation struct { HardBounds *ExtendedBoundsFieldDateMath `json:"hard_bounds,omitempty"` Interval Duration `json:"interval,omitempty"` Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` MinDocCount *int `json:"min_doc_count,omitempty"` Missing DateTime `json:"missing,omitempty"` Name *string `json:"name,omitempty"` @@ -55,6 +57,7 @@ type DateHistogramAggregation struct { } func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -89,9 +92,12 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "hard_bounds": if err := dec.Decode(&s.HardBounds); err != nil { @@ -104,8 +110,17 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - if err := dec.Decode(&s.Keyed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v } case "meta": @@ -114,8 +129,19 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { } case "min_doc_count": - if err := dec.Decode(&s.MinDocCount); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int(v) + s.MinDocCount = &f } case "missing": @@ -124,9 +150,12 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "offset": if err := dec.Decode(&s.Offset); err != nil { @@ -140,19 +169,24 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o - case '[': o := make([]map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o } case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Params); err != nil { return err } diff --git a/typedapi/types/datehistogrambucket.go b/typedapi/types/datehistogrambucket.go old mode 100755 new mode 100644 index a8234c5820..c211f4514f --- a/typedapi/types/datehistogrambucket.go +++ b/typedapi/types/datehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // DateHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L350-L353 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L351-L354 type DateHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -43,6 +45,7 @@ type DateHistogramBucket struct { } func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -56,451 +59,19 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); 
err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": @@ -509,9 +80,525 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if 
err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o 
+ + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -537,6 +624,7 @@ func (s DateHistogramBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/datehistogramgrouping.go b/typedapi/types/datehistogramgrouping.go old mode 100755 new mode 100644 index 6d1b6cab09..b46dafa328 --- a/typedapi/types/datehistogramgrouping.go +++ b/typedapi/types/datehistogramgrouping.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DateHistogramGrouping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/_types/Groupings.ts#L30-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/_types/Groupings.ts#L30-L38 type DateHistogramGrouping struct { CalendarInterval Duration `json:"calendar_interval,omitempty"` Delay Duration `json:"delay,omitempty"` @@ -33,6 +41,64 @@ type DateHistogramGrouping struct { TimeZone *string `json:"time_zone,omitempty"` } +func (s *DateHistogramGrouping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "calendar_interval": + if err := dec.Decode(&s.CalendarInterval); err != nil { + return err + } + + case "delay": + if err := dec.Decode(&s.Delay); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "fixed_interval": + if err := dec.Decode(&s.FixedInterval); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "interval": + if err := dec.Decode(&s.Interval); err != nil { + return err + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + } + } + return nil +} + // NewDateHistogramGrouping returns a DateHistogramGrouping. func NewDateHistogramGrouping() *DateHistogramGrouping { r := &DateHistogramGrouping{} diff --git a/typedapi/types/dateindexnameprocessor.go b/typedapi/types/dateindexnameprocessor.go old mode 100755 new mode 100644 index c1d4eabc6f..0d7ed99402 --- a/typedapi/types/dateindexnameprocessor.go +++ b/typedapi/types/dateindexnameprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DateIndexNameProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L164-L177 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L164-L177 type DateIndexNameProcessor struct { DateFormats []string `json:"date_formats"` // DateRounding How to round the date when formatting the date into the index name. 
Valid @@ -43,6 +53,119 @@ type DateIndexNameProcessor struct { Timezone *string `json:"timezone,omitempty"` } +func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "date_formats": + if err := dec.Decode(&s.DateFormats); err != nil { + return err + } + + case "date_rounding": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DateRounding = o + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "index_name_format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexNameFormat = &o + + case "index_name_prefix": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexNamePrefix = &o + + case "locale": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Locale = &o + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "timezone": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Timezone = &o + + } + } + return nil +} + // NewDateIndexNameProcessor returns a DateIndexNameProcessor. func NewDateIndexNameProcessor() *DateIndexNameProcessor { r := &DateIndexNameProcessor{} diff --git a/typedapi/types/datenanosproperty.go b/typedapi/types/datenanosproperty.go old mode 100755 new mode 100644 index 5d8a9ba1a4..9747930bff --- a/typedapi/types/datenanosproperty.go +++ b/typedapi/types/datenanosproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DateNanosProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L73-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L73-L81 type DateNanosProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -54,6 +56,7 @@ type DateNanosProperty struct { } func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -68,18 +71,49 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -88,6 +122,9 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -375,33 +412,68 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -412,11 +484,25 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) 
error { } case "precision_step": - if err := dec.Decode(&s.PrecisionStep); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrecisionStep = &value + case float64: + f := int(v) + s.PrecisionStep = &f } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -704,20 +790,32 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/dateprocessor.go b/typedapi/types/dateprocessor.go old mode 100755 new mode 100644 index 0541090d10..155669665c --- a/typedapi/types/dateprocessor.go +++ b/typedapi/types/dateprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DateProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L179-L185 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L179-L185 type DateProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -36,6 +46,100 @@ type DateProcessor struct { Timezone *string `json:"timezone,omitempty"` } +func (s *DateProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "formats": + if err := dec.Decode(&s.Formats); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "locale": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Locale = &o + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + case "timezone": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Timezone = &o + + } + } + return nil +} + // NewDateProcessor returns a DateProcessor. func NewDateProcessor() *DateProcessor { r := &DateProcessor{} diff --git a/typedapi/types/dateproperty.go b/typedapi/types/dateproperty.go old mode 100755 new mode 100644 index 55091cb06b..f5d26344b1 --- a/typedapi/types/dateproperty.go +++ b/typedapi/types/dateproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DateProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L61-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L61-L71 type DateProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -56,6 +58,7 @@ type DateProperty struct { } func (s *DateProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -70,18 +73,49 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -95,6 +129,9 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -382,38 +419,76 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "locale": - if err := dec.Decode(&s.Locale); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Locale = &o case "meta": + if s.Meta == nil { + s.Meta = 
make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -424,11 +499,25 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } case "precision_step": - if err := dec.Decode(&s.PrecisionStep); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrecisionStep = &value + case float64: + f := int(v) + s.PrecisionStep = &f } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -716,20 +805,32 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/daterangeaggregate.go b/typedapi/types/daterangeaggregate.go old mode 100755 new mode 100644 index 8997d14f25..c53bc7dff1 --- a/typedapi/types/daterangeaggregate.go +++ b/typedapi/types/daterangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // DateRangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L542-L547 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L543-L548 type DateRangeAggregate struct { - Buckets BucketsRangeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsRangeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *DateRangeAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *DateRangeAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]RangeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []RangeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/daterangeaggregation.go b/typedapi/types/daterangeaggregation.go old mode 100755 new mode 100644 index 45dcd0ae29..cc5fc1997d --- a/typedapi/types/daterangeaggregation.go +++ b/typedapi/types/daterangeaggregation.go @@ -16,26 +16,107 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // DateRangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L131-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L131-L138 type DateRangeAggregation struct { - Field *string `json:"field,omitempty"` - Format *string `json:"format,omitempty"` - Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Missing Missing `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` - Ranges []DateRangeExpression `json:"ranges,omitempty"` - TimeZone *string `json:"time_zone,omitempty"` + Field *string `json:"field,omitempty"` + Format *string `json:"format,omitempty"` + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing Missing `json:"missing,omitempty"` + Name *string `json:"name,omitempty"` + Ranges []DateRangeExpression `json:"ranges,omitempty"` + TimeZone *string `json:"time_zone,omitempty"` +} + +func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "ranges": + if err := dec.Decode(&s.Ranges); err != nil { + return err + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + } + } + return nil } // NewDateRangeAggregation returns a DateRangeAggregation. diff --git a/typedapi/types/daterangeexpression.go b/typedapi/types/daterangeexpression.go old mode 100755 new mode 100644 index 65f59c09a6..6f79655536 --- a/typedapi/types/daterangeexpression.go +++ b/typedapi/types/daterangeexpression.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DateRangeExpression type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L149-L153 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L149-L153 type DateRangeExpression struct { From FieldDateMath `json:"from,omitempty"` Key *string `json:"key,omitempty"` To FieldDateMath `json:"to,omitempty"` } +func (s *DateRangeExpression) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "from": + if err := dec.Decode(&s.From); err != nil { + return err + } + + case "key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Key = &o + + case "to": + if err := dec.Decode(&s.To); err != nil { + return err + } + + } + } + return nil +} + // NewDateRangeExpression returns a DateRangeExpression. func NewDateRangeExpression() *DateRangeExpression { r := &DateRangeExpression{} diff --git a/typedapi/types/daterangeproperty.go b/typedapi/types/daterangeproperty.go old mode 100755 new mode 100644 index 239ed30c69..369af36518 --- a/typedapi/types/daterangeproperty.go +++ b/typedapi/types/daterangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DateRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/range.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/range.ts#L29-L32 type DateRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -52,6 +54,7 @@ type DateRangeProperty struct { } func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -66,23 +69,63 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -91,6 +134,9 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -378,33 +424,62 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := 
make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -692,20 +767,32 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/daterangequery.go b/typedapi/types/daterangequery.go old mode 100755 new mode 100644 index 2a6a2eca22..fa7cfe5d74 --- a/typedapi/types/daterangequery.go +++ b/typedapi/types/daterangequery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // DateRangeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L72-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L72-L81 type DateRangeQuery struct { Boost *float32 `json:"boost,omitempty"` Format *string `json:"format,omitempty"` @@ -41,6 +49,95 @@ type DateRangeQuery struct { To string `json:"to,omitempty"` } +func (s *DateRangeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "format": + if err := dec.Decode(&s.Format); err != nil { + return err + } + + case "from": + if err := dec.Decode(&s.From); err != nil { + return err + } + + case "gt": + if err := dec.Decode(&s.Gt); err != nil { + return err + } + + case "gte": + if err := dec.Decode(&s.Gte); err != nil { + return err + } + + case "lt": + if err := dec.Decode(&s.Lt); err != nil { + return err + } + + case "lte": + if err := dec.Decode(&s.Lte); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return err + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + case "to": + if err := dec.Decode(&s.To); err != nil { + return err + } + + } + } + return nil +} + // NewDateRangeQuery returns a DateRangeQuery. 
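The hunks above replace direct dec.Decode calls with a lenient pattern: scalar fields such as boost and coerce are first decoded into an empty interface and then coerced with strconv, so values arriving either as native JSON scalars or as quoted strings are accepted. A minimal standalone sketch of that pattern, using a hypothetical exampleMapping type rather than any generated type:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// exampleMapping is a hypothetical stand-in for the generated property types;
// it is not part of the typedapi package.
type exampleMapping struct {
	Boost  *float32
	Coerce *bool
}

// UnmarshalJSON mirrors the generated pattern: walk the object with a token
// decoder and accept scalars either as native JSON values or as quoted strings.
func (s *exampleMapping) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "boost":
			var tmp interface{}
			dec.Decode(&tmp)
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseFloat(v, 32)
				if err != nil {
					return err
				}
				f := float32(value)
				s.Boost = &f
			case float64:
				f := float32(v)
				s.Boost = &f
			}
		case "coerce":
			var tmp interface{}
			dec.Decode(&tmp)
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseBool(v)
				if err != nil {
					return err
				}
				s.Coerce = &value
			case bool:
				s.Coerce = &v
			}
		}
	}
	return nil
}

func main() {
	for _, in := range []string{
		`{"boost":1.5,"coerce":true}`,
		`{"boost":"1.5","coerce":"true"}`, // quoted scalars are accepted too
	} {
		var m exampleMapping
		if err := json.Unmarshal([]byte(in), &m); err != nil {
			panic(err)
		}
		fmt.Println(*m.Boost, *m.Coerce)
	}
}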
func NewDateRangeQuery() *DateRangeQuery { r := &DateRangeQuery{} diff --git a/typedapi/types/datetime.go b/typedapi/types/datetime.go old mode 100755 new mode 100644 index 0a295cd993..d102e00aa2 --- a/typedapi/types/datetime.go +++ b/typedapi/types/datetime.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // int64 // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L22-L27 type DateTime interface{} diff --git a/typedapi/types/decayfunction.go b/typedapi/types/decayfunction.go old mode 100755 new mode 100644 index 1805a4c6c8..d20f2bdd5a --- a/typedapi/types/decayfunction.go +++ b/typedapi/types/decayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ package types // NumericDecayFunction // GeoDecayFunction // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L100-L105 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L100-L105 type DecayFunction interface{} diff --git a/typedapi/types/decayplacementdatemathduration.go b/typedapi/types/decayplacementdatemathduration.go old mode 100755 new mode 100644 index d51e9125f5..bbb5b25bd6 --- a/typedapi/types/decayplacementdatemathduration.go +++ b/typedapi/types/decayplacementdatemathduration.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DecayPlacementDateMathDuration type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L77-L82 type DecayPlacementDateMathDuration struct { Decay *Float64 `json:"decay,omitempty"` Offset Duration `json:"offset,omitempty"` @@ -30,6 +40,57 @@ type DecayPlacementDateMathDuration struct { Scale Duration `json:"scale,omitempty"` } +func (s *DecayPlacementDateMathDuration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decay": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Decay = &f + case float64: + f := Float64(v) + s.Decay = &f + } + + case "offset": + if err := dec.Decode(&s.Offset); err != nil { + return err + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "scale": + if err := dec.Decode(&s.Scale); err != nil { + return err + } + + } + } + return nil +} + // NewDecayPlacementDateMathDuration returns a DecayPlacementDateMathDuration. func NewDecayPlacementDateMathDuration() *DecayPlacementDateMathDuration { r := &DecayPlacementDateMathDuration{} diff --git a/typedapi/types/decayplacementdoubledouble.go b/typedapi/types/decayplacementdoubledouble.go old mode 100755 new mode 100644 index 083cd8257d..d257120575 --- a/typedapi/types/decayplacementdoubledouble.go +++ b/typedapi/types/decayplacementdoubledouble.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DecayPlacementdoubledouble type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L77-L82 type DecayPlacementdoubledouble struct { Decay *Float64 `json:"decay,omitempty"` Offset *Float64 `json:"offset,omitempty"` @@ -30,6 +40,90 @@ type DecayPlacementdoubledouble struct { Scale *Float64 `json:"scale,omitempty"` } +func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decay": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Decay = &f + case float64: + f := Float64(v) + s.Decay = &f + } + + case "offset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Offset = &f + case float64: + f := Float64(v) + s.Offset = &f + } + + case "origin": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Origin = &f + case float64: + f := Float64(v) + s.Origin = &f + } + + case "scale": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Scale = &f + case float64: + f := Float64(v) + s.Scale = &f + } + + } + } + return nil +} + // NewDecayPlacementdoubledouble returns a DecayPlacementdoubledouble. func NewDecayPlacementdoubledouble() *DecayPlacementdoubledouble { r := &DecayPlacementdoubledouble{} diff --git a/typedapi/types/decayplacementgeolocationdistance.go b/typedapi/types/decayplacementgeolocationdistance.go old mode 100755 new mode 100644 index 612b98af43..a610043e91 --- a/typedapi/types/decayplacementgeolocationdistance.go +++ b/typedapi/types/decayplacementgeolocationdistance.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DecayPlacementGeoLocationDistance type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L77-L82 type DecayPlacementGeoLocationDistance struct { Decay *Float64 `json:"decay,omitempty"` Offset *string `json:"offset,omitempty"` @@ -30,6 +40,57 @@ type DecayPlacementGeoLocationDistance struct { Scale *string `json:"scale,omitempty"` } +func (s *DecayPlacementGeoLocationDistance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decay": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Decay = &f + case float64: + f := Float64(v) + s.Decay = &f + } + + case "offset": + if err := dec.Decode(&s.Offset); err != nil { + return err + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "scale": + if err := dec.Decode(&s.Scale); err != nil { + return err + } + + } + } + return nil +} + // NewDecayPlacementGeoLocationDistance returns a DecayPlacementGeoLocationDistance. func NewDecayPlacementGeoLocationDistance() *DecayPlacementGeoLocationDistance { r := &DecayPlacementGeoLocationDistance{} diff --git a/typedapi/types/defaults.go b/typedapi/types/defaults.go old mode 100755 new mode 100644 index ae9642be38..4f677d4bf5 --- a/typedapi/types/defaults.go +++ b/typedapi/types/defaults.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Defaults type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/info/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/info/types.ts#L24-L27 type Defaults struct { AnomalyDetectors AnomalyDetectors `json:"anomaly_detectors"` Datafeeds Datafeeds `json:"datafeeds"` diff --git a/typedapi/types/definition.go b/typedapi/types/definition.go old mode 100755 new mode 100644 index cf592aa9e1..fac5f37777 --- a/typedapi/types/definition.go +++ b/typedapi/types/definition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Definition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L24-L29 type Definition struct { // Preprocessors Collection of preprocessors Preprocessors []Preprocessor `json:"preprocessors,omitempty"` diff --git a/typedapi/types/delayeddatacheckconfig.go b/typedapi/types/delayeddatacheckconfig.go old mode 100755 new mode 100644 index 0746707da2..3710b14aa5 --- a/typedapi/types/delayeddatacheckconfig.go +++ b/typedapi/types/delayeddatacheckconfig.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DelayedDataCheckConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L119-L130 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L119-L130 type DelayedDataCheckConfig struct { // CheckWindow The window of time that is searched for late data. This window of time ends // with the latest finalized bucket. @@ -35,6 +45,45 @@ type DelayedDataCheckConfig struct { Enabled bool `json:"enabled"` } +func (s *DelayedDataCheckConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "check_window": + if err := dec.Decode(&s.CheckWindow); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewDelayedDataCheckConfig returns a DelayedDataCheckConfig. func NewDelayedDataCheckConfig() *DelayedDataCheckConfig { r := &DelayedDataCheckConfig{} diff --git a/typedapi/types/delimitedpayloadtokenfilter.go b/typedapi/types/delimitedpayloadtokenfilter.go old mode 100755 new mode 100644 index d10a34afc7..fc5fa22022 --- a/typedapi/types/delimitedpayloadtokenfilter.go +++ b/typedapi/types/delimitedpayloadtokenfilter.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/delimitedpayloadencoding" + + "bytes" + "errors" + "io" + + "encoding/json" ) // DelimitedPayloadTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L67-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L68-L72 type DelimitedPayloadTokenFilter struct { Delimiter *string `json:"delimiter,omitempty"` Encoding *delimitedpayloadencoding.DelimitedPayloadEncoding `json:"encoding,omitempty"` @@ -34,6 +40,49 @@ type DelimitedPayloadTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *DelimitedPayloadTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "delimiter": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Delimiter = &o + + case "encoding": + if err := dec.Decode(&s.Encoding); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewDelimitedPayloadTokenFilter returns a DelimitedPayloadTokenFilter. func NewDelimitedPayloadTokenFilter() *DelimitedPayloadTokenFilter { r := &DelimitedPayloadTokenFilter{} diff --git a/typedapi/types/densevectorindexoptions.go b/typedapi/types/densevectorindexoptions.go old mode 100755 new mode 100644 index 466b826d0c..eb93167c1d --- a/typedapi/types/densevectorindexoptions.go +++ b/typedapi/types/densevectorindexoptions.go @@ -16,19 +16,89 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DenseVectorIndexOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/DenseVectorIndexOptions.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/DenseVectorIndexOptions.ts#L22-L26 type DenseVectorIndexOptions struct { EfConstruction int `json:"ef_construction"` M int `json:"m"` Type string `json:"type"` } +func (s *DenseVectorIndexOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "ef_construction": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.EfConstruction = value + case float64: + f := int(v) + s.EfConstruction = f + } + + case "m": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.M = value + case float64: + f := int(v) + s.M = f + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewDenseVectorIndexOptions returns a DenseVectorIndexOptions. func NewDenseVectorIndexOptions() *DenseVectorIndexOptions { r := &DenseVectorIndexOptions{} diff --git a/typedapi/types/densevectorproperty.go b/typedapi/types/densevectorproperty.go old mode 100755 new mode 100644 index b487995036..69a382519d --- a/typedapi/types/densevectorproperty.go +++ b/typedapi/types/densevectorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DenseVectorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/complex.ts#L51-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/complex.ts#L51-L57 type DenseVectorProperty struct { Dims int `json:"dims"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -48,6 +50,7 @@ type DenseVectorProperty struct { } func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,8 +65,19 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { switch t { case "dims": - if err := dec.Decode(&s.Dims); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Dims = value + case float64: + f := int(v) + s.Dims = f } case "dynamic": @@ -72,6 +86,9 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -359,20 +376,40 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "index_options": @@ -381,11 +418,17 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -673,16 +716,19 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "type": if err := dec.Decode(&s.Type); err != nil { diff --git a/typedapi/types/deprecation.go b/typedapi/types/deprecation.go old mode 100755 new mode 100644 index 1e0072348d..43f4109aca --- a/typedapi/types/deprecation.go +++ b/typedapi/types/deprecation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // Deprecation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/deprecations/types.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/deprecations/types.ts#L29-L35 type Deprecation struct { Details string `json:"details"` // Level The level property describes the significance of the issue. diff --git a/typedapi/types/deprecationindexing.go b/typedapi/types/deprecationindexing.go old mode 100755 new mode 100644 index 26d630ca33..2497d62eb2 --- a/typedapi/types/deprecationindexing.go +++ b/typedapi/types/deprecationindexing.go @@ -16,17 +16,52 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // DeprecationIndexing type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L140-L142 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L140-L142 type DeprecationIndexing struct { Enabled string `json:"enabled"` } +func (s *DeprecationIndexing) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Enabled = o + + } + } + return nil +} + // NewDeprecationIndexing returns a DeprecationIndexing. func NewDeprecationIndexing() *DeprecationIndexing { r := &DeprecationIndexing{} diff --git a/typedapi/types/derivativeaggregate.go b/typedapi/types/derivativeaggregate.go old mode 100755 new mode 100644 index ceee6de517..744eea1307 --- a/typedapi/types/derivativeaggregate.go +++ b/typedapi/types/derivativeaggregate.go @@ -16,21 +16,27 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // DerivativeAggregate type. 
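Plain string fields in these hunks (for example DeprecationIndexing.Enabled above, or the format and _name cases elsewhere) are now captured as a json.RawMessage and converted with string(tmp). Since json.RawMessage holds the value's raw bytes, the resulting Go string keeps the surrounding JSON quotes; a minimal standalone check of that behaviour, with no generated types involved:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	// Mirrors the generated handling of plain string fields: the value is
	// captured as json.RawMessage and converted with string(tmp).
	dec := json.NewDecoder(bytes.NewReader([]byte(`"true"`)))
	var tmp json.RawMessage
	if err := dec.Decode(&tmp); err != nil {
		panic(err)
	}
	o := string(tmp)
	fmt.Printf("%q (len %d)\n", o, len(o)) // prints "\"true\"" (len 6) – the JSON quotes are part of the raw bytes
}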
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L226-L230 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L227-L231 type DerivativeAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - NormalizedValue *Float64 `json:"normalized_value,omitempty"` - NormalizedValueAsString *string `json:"normalized_value_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + NormalizedValue *Float64 `json:"normalized_value,omitempty"` + NormalizedValueAsString *string `json:"normalized_value_as_string,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -38,6 +44,68 @@ type DerivativeAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "normalized_value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.NormalizedValue = &f + case float64: + f := Float64(v) + s.NormalizedValue = &f + } + + case "normalized_value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NormalizedValueAsString = &o + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewDerivativeAggregate returns a DerivativeAggregate. func NewDerivativeAggregate() *DerivativeAggregate { r := &DerivativeAggregate{} diff --git a/typedapi/types/derivativeaggregation.go b/typedapi/types/derivativeaggregation.go old mode 100755 new mode 100644 index eed76ce5da..4420f4055a --- a/typedapi/types/derivativeaggregation.go +++ b/typedapi/types/derivativeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // DerivativeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L165-L165 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L165-L165 type DerivativeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/detectionrule.go b/typedapi/types/detectionrule.go old mode 100755 new mode 100644 index 98f116e367..cd85212d7e --- a/typedapi/types/detectionrule.go +++ b/typedapi/types/detectionrule.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/ruleaction" + + "bytes" + "errors" + "io" + + "encoding/json" ) // DetectionRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Rule.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Rule.ts#L25-L39 type DetectionRule struct { // Actions The set of actions to be triggered when the rule applies. If more than one // action is specified the effects of all actions are combined. @@ -42,6 +48,44 @@ type DetectionRule struct { Scope map[string]FilterRef `json:"scope,omitempty"` } +func (s *DetectionRule) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "conditions": + if err := dec.Decode(&s.Conditions); err != nil { + return err + } + + case "scope": + if s.Scope == nil { + s.Scope = make(map[string]FilterRef, 0) + } + if err := dec.Decode(&s.Scope); err != nil { + return err + } + + } + } + return nil +} + // NewDetectionRule returns a DetectionRule. func NewDetectionRule() *DetectionRule { r := &DetectionRule{ diff --git a/typedapi/types/detector.go b/typedapi/types/detector.go old mode 100755 new mode 100644 index 807240250b..4ed7bef93c --- a/typedapi/types/detector.go +++ b/typedapi/types/detector.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/excludefrequent" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // Detector type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Detector.ts#L25-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Detector.ts#L25-L67 type Detector struct { // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. It is used for @@ -66,6 +74,102 @@ type Detector struct { UseNull *bool `json:"use_null,omitempty"` } +func (s *Detector) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "by_field_name": + if err := dec.Decode(&s.ByFieldName); err != nil { + return err + } + + case "custom_rules": + if err := dec.Decode(&s.CustomRules); err != nil { + return err + } + + case "detector_description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DetectorDescription = &o + + case "detector_index": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DetectorIndex = &value + case float64: + f := int(v) + s.DetectorIndex = &f + } + + case "exclude_frequent": + if err := dec.Decode(&s.ExcludeFrequent); err != nil { + return err + } + + case "field_name": + if err := dec.Decode(&s.FieldName); err != nil { + return err + } + + case "function": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Function = &o + + case "over_field_name": + if err := dec.Decode(&s.OverFieldName); err != nil { + return err + } + + case "partition_field_name": + if err := dec.Decode(&s.PartitionFieldName); err != nil { + return err + } + + case "use_null": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UseNull = &value + case bool: + s.UseNull = &v + } + + } + } + return nil +} + // NewDetector returns a Detector. func NewDetector() *Detector { r := &Detector{} diff --git a/typedapi/types/detectorread.go b/typedapi/types/detectorread.go old mode 100755 new mode 100644 index eb0cd8666e..263b835407 --- a/typedapi/types/detectorread.go +++ b/typedapi/types/detectorread.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/excludefrequent" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // DetectorRead type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Detector.ts#L69-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Detector.ts#L69-L80 type DetectorRead struct { // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. It is used for @@ -66,6 +74,102 @@ type DetectorRead struct { UseNull *bool `json:"use_null,omitempty"` } +func (s *DetectorRead) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "by_field_name": + if err := dec.Decode(&s.ByFieldName); err != nil { + return err + } + + case "custom_rules": + if err := dec.Decode(&s.CustomRules); err != nil { + return err + } + + case "detector_description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DetectorDescription = &o + + case "detector_index": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DetectorIndex = &value + case float64: + f := int(v) + s.DetectorIndex = &f + } + + case "exclude_frequent": + if err := dec.Decode(&s.ExcludeFrequent); err != nil { + return err + } + + case "field_name": + if err := dec.Decode(&s.FieldName); err != nil { + return err + } + + case "function": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Function = o + + case "over_field_name": + if err := dec.Decode(&s.OverFieldName); err != nil { + return err + } + + case "partition_field_name": + if err := dec.Decode(&s.PartitionFieldName); err != nil { + return err + } + + case "use_null": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UseNull = &value + case bool: + s.UseNull = &v + } + + } + } + return nil +} + // NewDetectorRead returns a DetectorRead. func NewDetectorRead() *DetectorRead { r := &DetectorRead{} diff --git a/typedapi/types/diagnosis.go b/typedapi/types/diagnosis.go new file mode 100644 index 0000000000..b34af99787 --- /dev/null +++ b/typedapi/types/diagnosis.go @@ -0,0 +1,39 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +// Diagnosis type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L48-L54 +type Diagnosis struct { + Action string `json:"action"` + AffectedResources DiagnosisAffectedResources `json:"affected_resources"` + Cause string `json:"cause"` + HelpUrl string `json:"help_url"` + Id string `json:"id"` +} + +// NewDiagnosis returns a Diagnosis. +func NewDiagnosis() *Diagnosis { + r := &Diagnosis{} + + return r +} diff --git a/typedapi/types/diagnosisaffectedresources.go b/typedapi/types/diagnosisaffectedresources.go new file mode 100644 index 0000000000..231993cd61 --- /dev/null +++ b/typedapi/types/diagnosisaffectedresources.go @@ -0,0 +1,103 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + +// DiagnosisAffectedResources type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L56-L62 +type DiagnosisAffectedResources struct { + FeatureStates []string `json:"feature_states,omitempty"` + Indices []string `json:"indices,omitempty"` + Nodes []IndicatorNode `json:"nodes,omitempty"` + SlmPolicies []string `json:"slm_policies,omitempty"` + SnapshotRepositories []string `json:"snapshot_repositories,omitempty"` +} + +func (s *DiagnosisAffectedResources) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_states": + if err := dec.Decode(&s.FeatureStates); err != nil { + return err + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "nodes": + if err := dec.Decode(&s.Nodes); err != nil { + return err + } + + case "slm_policies": + if err := dec.Decode(&s.SlmPolicies); err != nil { + return err + } + + case "snapshot_repositories": + if err := dec.Decode(&s.SnapshotRepositories); err != nil { + return err + } + + } + } + return nil +} + +// NewDiagnosisAffectedResources returns a DiagnosisAffectedResources. 
+func NewDiagnosisAffectedResources() *DiagnosisAffectedResources { + r := &DiagnosisAffectedResources{} + + return r +} diff --git a/typedapi/types/dictionarydecompoundertokenfilter.go b/typedapi/types/dictionarydecompoundertokenfilter.go old mode 100755 new mode 100644 index ea24d7110a..c3276fbf68 --- a/typedapi/types/dictionarydecompoundertokenfilter.go +++ b/typedapi/types/dictionarydecompoundertokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DictionaryDecompounderTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L53-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L54-L56 type DictionaryDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` @@ -35,6 +45,119 @@ type DictionaryDecompounderTokenFilter struct { WordListPath *string `json:"word_list_path,omitempty"` } +func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hyphenation_patterns_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HyphenationPatternsPath = &o + + case "max_subword_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxSubwordSize = &value + case float64: + f := int(v) + s.MaxSubwordSize = &f + } + + case "min_subword_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinSubwordSize = &value + case float64: + f := int(v) + s.MinSubwordSize = &f + } + + case "min_word_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinWordSize = &value + case float64: + f := int(v) + s.MinWordSize = &f + } + + case "only_longest_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OnlyLongestMatch = &value + case bool: + s.OnlyLongestMatch = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "word_list": + if err := dec.Decode(&s.WordList); err != nil { + return err + } + + case "word_list_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WordListPath = &o + + } + } + return nil +} + // NewDictionaryDecompounderTokenFilter returns a DictionaryDecompounderTokenFilter. 
func NewDictionaryDecompounderTokenFilter() *DictionaryDecompounderTokenFilter { r := &DictionaryDecompounderTokenFilter{} diff --git a/typedapi/types/directgenerator.go b/typedapi/types/directgenerator.go old mode 100755 new mode 100644 index cc40e89307..ee8da9c682 --- a/typedapi/types/directgenerator.go +++ b/typedapi/types/directgenerator.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/suggestmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // DirectGenerator type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L166-L178 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L170-L182 type DirectGenerator struct { Field string `json:"field"` MaxEdits *int `json:"max_edits,omitempty"` @@ -41,6 +49,164 @@ type DirectGenerator struct { SuggestMode *suggestmode.SuggestMode `json:"suggest_mode,omitempty"` } +func (s *DirectGenerator) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "max_edits": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxEdits = &value + case float64: + f := int(v) + s.MaxEdits = &f + } + + case "max_inspections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.MaxInspections = &f + case float64: + f := float32(v) + s.MaxInspections = &f + } + + case "max_term_freq": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.MaxTermFreq = &f + case float64: + f := float32(v) + s.MaxTermFreq = &f + } + + case "min_doc_freq": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.MinDocFreq = &f + case float64: + f := float32(v) + s.MinDocFreq = &f + } + + case "min_word_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinWordLength = &value + case float64: + f := int(v) + s.MinWordLength = &f + } + + case "post_filter": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PostFilter = &o + + case "pre_filter": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PreFilter = &o + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if 
err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "suggest_mode": + if err := dec.Decode(&s.SuggestMode); err != nil { + return err + } + + } + } + return nil +} + // NewDirectGenerator returns a DirectGenerator. func NewDirectGenerator() *DirectGenerator { r := &DirectGenerator{} diff --git a/typedapi/types/discovery.go b/typedapi/types/discovery.go old mode 100755 new mode 100644 index 10f355add7..810e196682 --- a/typedapi/types/discovery.go +++ b/typedapi/types/discovery.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Discovery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L82-L88 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L82-L88 type Discovery struct { ClusterApplierStats *ClusterAppliedStats `json:"cluster_applier_stats,omitempty"` ClusterStateQueue *ClusterStateQueue `json:"cluster_state_queue,omitempty"` diff --git a/typedapi/types/discoverynode.go b/typedapi/types/discoverynode.go old mode 100755 new mode 100644 index 196e02f0d3..d1ddceeb21 --- a/typedapi/types/discoverynode.go +++ b/typedapi/types/discoverynode.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DiscoveryNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DiscoveryNode.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DiscoveryNode.ts#L24-L30 type DiscoveryNode struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` @@ -31,6 +39,54 @@ type DiscoveryNode struct { TransportAddress string `json:"transport_address"` } +func (s *DiscoveryNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "ephemeral_id": + if err := dec.Decode(&s.EphemeralId); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewDiscoveryNode returns a DiscoveryNode. func NewDiscoveryNode() *DiscoveryNode { r := &DiscoveryNode{ diff --git a/typedapi/types/diskindicator.go b/typedapi/types/diskindicator.go new file mode 100644 index 0000000000..6114a4d0b2 --- /dev/null +++ b/typedapi/types/diskindicator.go @@ -0,0 +1,43 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// DiskIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L120-L124 +type DiskIndicator struct { + Details *DiskIndicatorDetails `json:"details,omitempty"` + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewDiskIndicator returns a DiskIndicator. 
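DiskIndicator and DiskIndicatorDetails are part of the new health report surface, and the details counters go through the same string-or-number coercion as the other numeric fields in this diff. A small sketch decoding a hypothetical disk indicator payload; status, impacts, and diagnosis are omitted to keep it minimal:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical payload for the disk indicator of the health report API.
	// DiskIndicatorDetails' UnmarshalJSON (above) accepts the counters either
	// as JSON numbers or as quoted strings.
	payload := []byte(`{
		"symptom": "The cluster has enough available disk space.",
		"details": {
			"indices_with_readonly_block": 0,
			"nodes_over_flood_stage_watermark": 0,
			"nodes_over_high_watermark": 0,
			"nodes_with_enough_disk_space": "3",
			"nodes_with_unknown_disk_status": 0
		}
	}`)

	var ind types.DiskIndicator
	if err := json.Unmarshal(payload, &ind); err != nil {
		panic(err)
	}
	fmt.Println(ind.Symptom, ind.Details.NodesWithEnoughDiskSpace) // The cluster has enough available disk space. 3
}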
+func NewDiskIndicator() *DiskIndicator { + r := &DiskIndicator{} + + return r +} diff --git a/typedapi/types/diskindicatordetails.go b/typedapi/types/diskindicatordetails.go new file mode 100644 index 0000000000..6161744955 --- /dev/null +++ b/typedapi/types/diskindicatordetails.go @@ -0,0 +1,144 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// DiskIndicatorDetails type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L125-L131 +type DiskIndicatorDetails struct { + IndicesWithReadonlyBlock int64 `json:"indices_with_readonly_block"` + NodesOverFloodStageWatermark int64 `json:"nodes_over_flood_stage_watermark"` + NodesOverHighWatermark int64 `json:"nodes_over_high_watermark"` + NodesWithEnoughDiskSpace int64 `json:"nodes_with_enough_disk_space"` + NodesWithUnknownDiskStatus int64 `json:"nodes_with_unknown_disk_status"` +} + +func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indices_with_readonly_block": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndicesWithReadonlyBlock = value + case float64: + f := int64(v) + s.IndicesWithReadonlyBlock = f + } + + case "nodes_over_flood_stage_watermark": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NodesOverFloodStageWatermark = value + case float64: + f := int64(v) + s.NodesOverFloodStageWatermark = f + } + + case "nodes_over_high_watermark": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NodesOverHighWatermark = value + case float64: + f := int64(v) + s.NodesOverHighWatermark = f + } + + case "nodes_with_enough_disk_space": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NodesWithEnoughDiskSpace = value + case float64: + f := int64(v) + s.NodesWithEnoughDiskSpace = f + } + + case "nodes_with_unknown_disk_status": + var tmp interface{} + dec.Decode(&tmp) + switch v 
:= tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NodesWithUnknownDiskStatus = value + case float64: + f := int64(v) + s.NodesWithUnknownDiskStatus = f + } + + } + } + return nil +} + +// NewDiskIndicatorDetails returns a DiskIndicatorDetails. +func NewDiskIndicatorDetails() *DiskIndicatorDetails { + r := &DiskIndicatorDetails{} + + return r +} diff --git a/typedapi/types/diskusage.go b/typedapi/types/diskusage.go old mode 100755 new mode 100644 index 64e93415f6..3d854f9ee1 --- a/typedapi/types/diskusage.go +++ b/typedapi/types/diskusage.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DiskUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L62-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L62-L69 type DiskUsage struct { FreeBytes int64 `json:"free_bytes"` FreeDiskPercent Float64 `json:"free_disk_percent"` @@ -32,6 +42,111 @@ type DiskUsage struct { UsedDiskPercent Float64 `json:"used_disk_percent"` } +func (s *DiskUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "free_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeBytes = value + case float64: + f := int64(v) + s.FreeBytes = f + } + + case "free_disk_percent": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.FreeDiskPercent = f + case float64: + f := Float64(v) + s.FreeDiskPercent = f + } + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = o + + case "total_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalBytes = value + case float64: + f := int64(v) + s.TotalBytes = f + } + + case "used_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UsedBytes = value + case float64: + f := int64(v) + s.UsedBytes = f + } + + case "used_disk_percent": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.UsedDiskPercent = f + case float64: + f := Float64(v) + s.UsedDiskPercent = f + } + + } + } + return nil +} + // NewDiskUsage returns a DiskUsage. 
func NewDiskUsage() *DiskUsage { r := &DiskUsage{} diff --git a/typedapi/types/dismaxquery.go b/typedapi/types/dismaxquery.go old mode 100755 new mode 100644 index 2902887035..4dd899c20d --- a/typedapi/types/dismaxquery.go +++ b/typedapi/types/dismaxquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DisMaxQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L46-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L46-L50 type DisMaxQuery struct { Boost *float32 `json:"boost,omitempty"` Queries []Query `json:"queries"` @@ -30,6 +40,71 @@ type DisMaxQuery struct { TieBreaker *Float64 `json:"tie_breaker,omitempty"` } +func (s *DisMaxQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "queries": + if err := dec.Decode(&s.Queries); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "tie_breaker": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.TieBreaker = &f + case float64: + f := Float64(v) + s.TieBreaker = &f + } + + } + } + return nil +} + // NewDisMaxQuery returns a DisMaxQuery. func NewDisMaxQuery() *DisMaxQuery { r := &DisMaxQuery{} diff --git a/typedapi/types/dissectprocessor.go b/typedapi/types/dissectprocessor.go old mode 100755 new mode 100644 index 8eab7c3232..cdd07f4fb7 --- a/typedapi/types/dissectprocessor.go +++ b/typedapi/types/dissectprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DissectProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L187-L192 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L187-L192 type DissectProcessor struct { AppendSeparator *string `json:"append_separator,omitempty"` Description *string `json:"description,omitempty"` @@ -35,6 +45,104 @@ type DissectProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *DissectProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "append_separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AppendSeparator = &o + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewDissectProcessor returns a DissectProcessor. func NewDissectProcessor() *DissectProcessor { r := &DissectProcessor{} diff --git a/typedapi/types/distancefeaturequery.go b/typedapi/types/distancefeaturequery.go old mode 100755 new mode 100644 index a368425d32..754a218b33 --- a/typedapi/types/distancefeaturequery.go +++ b/typedapi/types/distancefeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // GeoDistanceFeatureQuery // DateDistanceFeatureQuery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L56-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L56-L60 type DistanceFeatureQuery interface{} diff --git a/typedapi/types/distancefeaturequerybasedatemathduration.go b/typedapi/types/distancefeaturequerybasedatemathduration.go old mode 100755 new mode 100644 index a1a40b5828..1675c44d93 --- a/typedapi/types/distancefeaturequerybasedatemathduration.go +++ b/typedapi/types/distancefeaturequerybasedatemathduration.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DistanceFeatureQueryBaseDateMathDuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L40-L44 type DistanceFeatureQueryBaseDateMathDuration struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` @@ -31,6 +41,65 @@ type DistanceFeatureQueryBaseDateMathDuration struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *DistanceFeatureQueryBaseDateMathDuration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "pivot": + if err := dec.Decode(&s.Pivot); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewDistanceFeatureQueryBaseDateMathDuration returns a DistanceFeatureQueryBaseDateMathDuration. 
func NewDistanceFeatureQueryBaseDateMathDuration() *DistanceFeatureQueryBaseDateMathDuration { r := &DistanceFeatureQueryBaseDateMathDuration{} diff --git a/typedapi/types/distancefeaturequerybasegeolocationdistance.go b/typedapi/types/distancefeaturequerybasegeolocationdistance.go old mode 100755 new mode 100644 index 99670c7ef0..057a6ed297 --- a/typedapi/types/distancefeaturequerybasegeolocationdistance.go +++ b/typedapi/types/distancefeaturequerybasegeolocationdistance.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DistanceFeatureQueryBaseGeoLocationDistance type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L40-L44 type DistanceFeatureQueryBaseGeoLocationDistance struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` @@ -31,6 +41,65 @@ type DistanceFeatureQueryBaseGeoLocationDistance struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *DistanceFeatureQueryBaseGeoLocationDistance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "pivot": + if err := dec.Decode(&s.Pivot); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewDistanceFeatureQueryBaseGeoLocationDistance returns a DistanceFeatureQueryBaseGeoLocationDistance. func NewDistanceFeatureQueryBaseGeoLocationDistance() *DistanceFeatureQueryBaseGeoLocationDistance { r := &DistanceFeatureQueryBaseGeoLocationDistance{} diff --git a/typedapi/types/diversifiedsampleraggregation.go b/typedapi/types/diversifiedsampleraggregation.go old mode 100755 new mode 100644 index d3c2f26aa4..303bb1d7d4 --- a/typedapi/types/diversifiedsampleraggregation.go +++ b/typedapi/types/diversifiedsampleraggregation.go @@ -16,29 +16,115 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sampleraggregationexecutionhint" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // DiversifiedSamplerAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L155-L161 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L155-L161 type DiversifiedSamplerAggregation struct { ExecutionHint *sampleraggregationexecutionhint.SamplerAggregationExecutionHint `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` MaxDocsPerValue *int `json:"max_docs_per_value,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Script Script `json:"script,omitempty"` ShardSize *int `json:"shard_size,omitempty"` } +func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "execution_hint": + if err := dec.Decode(&s.ExecutionHint); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "max_docs_per_value": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDocsPerValue = &value + case float64: + f := int(v) + s.MaxDocsPerValue = &f + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + } + } + return nil +} + // NewDiversifiedSamplerAggregation returns a DiversifiedSamplerAggregation. func NewDiversifiedSamplerAggregation() *DiversifiedSamplerAggregation { r := &DiversifiedSamplerAggregation{} diff --git a/typedapi/types/docstats.go b/typedapi/types/docstats.go old mode 100755 new mode 100644 index 62aed0e466..b61e0d21bf --- a/typedapi/types/docstats.go +++ b/typedapi/types/docstats.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DocStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L64-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L64-L67 type DocStats struct { Count int64 `json:"count"` Deleted *int64 `json:"deleted,omitempty"` } +func (s *DocStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "deleted": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Deleted = &value + case float64: + f := int64(v) + s.Deleted = &f + } + + } + } + return nil +} + // NewDocStats returns a DocStats. func NewDocStats() *DocStats { r := &DocStats{} diff --git a/typedapi/types/document.go b/typedapi/types/document.go old mode 100755 new mode 100644 index 771af25e53..b84ca0814c --- a/typedapi/types/document.go +++ b/typedapi/types/document.go @@ -16,23 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // Document type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/simulate/types.ts#L41-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/simulate/types.ts#L41-L45 type Document struct { Id_ *string `json:"_id,omitempty"` Index_ *string `json:"_index,omitempty"` Source_ json.RawMessage `json:"_source,omitempty"` } +func (s *Document) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + } + } + return nil +} + // NewDocument returns a Document. func NewDocument() *Document { r := &Document{} diff --git a/typedapi/types/documentrating.go b/typedapi/types/documentrating.go old mode 100755 new mode 100644 index db8491cd2c..e561962d04 --- a/typedapi/types/documentrating.go +++ b/typedapi/types/documentrating.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DocumentRating type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L116-L123 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L116-L123 type DocumentRating struct { // Id_ The document ID. Id_ string `json:"_id"` @@ -33,6 +43,52 @@ type DocumentRating struct { Rating int `json:"rating"` } +func (s *DocumentRating) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "rating": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Rating = value + case float64: + f := int(v) + s.Rating = f + } + + } + } + return nil +} + // NewDocumentRating returns a DocumentRating. func NewDocumentRating() *DocumentRating { r := &DocumentRating{} diff --git a/typedapi/types/documentsimulation.go b/typedapi/types/documentsimulation.go old mode 100755 new mode 100644 index 9de54b9d84..b7589aaa66 --- a/typedapi/types/documentsimulation.go +++ b/typedapi/types/documentsimulation.go @@ -16,20 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // DocumentSimulation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/simulate/types.ts#L47-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/simulate/types.ts#L47-L60 type DocumentSimulation struct { DocumentSimulation map[string]string `json:"-"` Id_ string `json:"_id"` @@ -41,6 +46,80 @@ type DocumentSimulation struct { Version_ StringifiedVersionNumber `json:"_version,omitempty"` } +func (s *DocumentSimulation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "_ingest": + if err := dec.Decode(&s.Ingest_); err != nil { + return err + } + + case "_routing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Routing_ = &o + + case "_source": + if s.Source_ == nil { + s.Source_ = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + case "_version_type": + if err := dec.Decode(&s.VersionType_); err != nil { + return err + } + + case "_version": + if err := dec.Decode(&s.Version_); err != nil { + return err + } + + default: + + if key, ok := t.(string); ok { + if s.DocumentSimulation == nil { + s.DocumentSimulation = make(map[string]string, 0) + } + raw := new(string) + if err := dec.Decode(&raw); err != nil { + return err + } + s.DocumentSimulation[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s DocumentSimulation) MarshalJSON() ([]byte, error) { type opt DocumentSimulation @@ -60,6 +139,7 @@ func (s DocumentSimulation) MarshalJSON() ([]byte, error) { for key, value := range s.DocumentSimulation { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "DocumentSimulation") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/dotexpanderprocessor.go b/typedapi/types/dotexpanderprocessor.go old mode 100755 new mode 100644 index 075d872dde..0728745a06 --- a/typedapi/types/dotexpanderprocessor.go +++ b/typedapi/types/dotexpanderprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DotExpanderProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L194-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L194-L197 type DotExpanderProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -33,6 +43,82 @@ type DotExpanderProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = &o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewDotExpanderProcessor returns a DotExpanderProcessor. func NewDotExpanderProcessor() *DotExpanderProcessor { r := &DotExpanderProcessor{} diff --git a/typedapi/types/doublenumberproperty.go b/typedapi/types/doublenumberproperty.go old mode 100755 new mode 100644 index add39098c2..a24ab1ae79 --- a/typedapi/types/doublenumberproperty.go +++ b/typedapi/types/doublenumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DoubleNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L141-L144 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L141-L144 type DoubleNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type DoubleNumberProperty struct { } func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,35 +435,78 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := 
dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.NullValue = &f + case float64: + f := Float64(v) + s.NullValue = &f } case "on_script_error": @@ -426,6 +515,9 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +805,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +817,39 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/doublerangeproperty.go b/typedapi/types/doublerangeproperty.go old mode 100755 new mode 100644 index 4c179cca8a..6be3b63742 --- a/typedapi/types/doublerangeproperty.go +++ b/typedapi/types/doublerangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DoubleRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/range.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/range.ts#L34-L36 type DoubleRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -51,6 +53,7 @@ type DoubleRangeProperty struct { } func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,23 +68,63 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -90,6 +133,9 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -377,28 +423,54 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +758,32 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { } 
s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/doubletermsaggregate.go b/typedapi/types/doubletermsaggregate.go old mode 100755 new mode 100644 index 960f91b12f..3415db1ca1 --- a/typedapi/types/doubletermsaggregate.go +++ b/typedapi/types/doubletermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DoubleTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L410-L415 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L411-L416 type DoubleTermsAggregate struct { - Buckets BucketsDoubleTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsDoubleTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]DoubleTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []DoubleTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: 
+ f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/doubletermsbucket.go b/typedapi/types/doubletermsbucket.go old mode 100755 new mode 100644 index 24c1796194..37e9769b9d --- a/typedapi/types/doubletermsbucket.go +++ b/typedapi/types/doubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // DoubleTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L417-L420 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L418-L421 type DoubleTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -44,6 +46,7 @@ type DoubleTermsBucket struct { } func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,467 +60,572 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { 
- return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := 
NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := 
NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "doc_count_error": - if err := dec.Decode(&s.DocCountError); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountError = &value + case float64: + f := int64(v) + s.DocCountError = &f } case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Key = f + case float64: + f := Float64(v) + s.Key = f } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case 
"median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := 
NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -543,6 +651,7 @@ func (s DoubleTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/downsampleconfig.go b/typedapi/types/downsampleconfig.go old mode 100755 new mode 100644 index 7406e94f96..f2b77e5c4d --- a/typedapi/types/downsampleconfig.go +++ b/typedapi/types/downsampleconfig.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DownsampleConfig type. 
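Editor's note: the rewritten DoubleTermsBucket decoder above dispatches sub-aggregations using the Elasticsearch typed-keys convention: a response key of the form "<type>#<name>" is split on "#", the prefix selects the concrete aggregate struct, and the value is stored under the bare name. A minimal sketch of what that enables, assuming the typedapi/types package from this diff and a hypothetical bucket payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical bucket as returned with ?typed_keys=true: the
	// "avg#avg_price" key carries both the aggregate type and its name.
	raw := []byte(`{"key": 1.5, "doc_count": 3, "avg#avg_price": {"value": 9.99}}`)

	var bucket types.DoubleTermsBucket
	if err := json.Unmarshal(raw, &bucket); err != nil {
		panic(err)
	}

	// The decoder should have routed the sub-aggregation to a
	// *types.AvgAggregate stored under the plain name "avg_price".
	agg, ok := bucket.Aggregations["avg_price"].(*types.AvgAggregate)
	fmt.Println(ok, agg != nil)
}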
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/Downsample.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/Downsample.ts#L22-L24 type DownsampleConfig struct { FixedInterval string `json:"fixed_interval"` } +func (s *DownsampleConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fixed_interval": + if err := dec.Decode(&s.FixedInterval); err != nil { + return err + } + + } + } + return nil +} + // NewDownsampleConfig returns a DownsampleConfig. func NewDownsampleConfig() *DownsampleConfig { r := &DownsampleConfig{} diff --git a/typedapi/types/dropprocessor.go b/typedapi/types/dropprocessor.go old mode 100755 new mode 100644 index 1d386fad3a..8af449ca86 --- a/typedapi/types/dropprocessor.go +++ b/typedapi/types/dropprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // DropProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L199-L199 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L199-L199 type DropProcessor struct { Description *string `json:"description,omitempty"` If *string `json:"if,omitempty"` @@ -31,6 +41,69 @@ type DropProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *DropProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewDropProcessor returns a DropProcessor. func NewDropProcessor() *DropProcessor { r := &DropProcessor{} diff --git a/typedapi/types/duration.go b/typedapi/types/duration.go old mode 100755 new mode 100644 index 9dbd7cf424..188907cbc5 --- a/typedapi/types/duration.go +++ b/typedapi/types/duration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
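Editor's note: the new DropProcessor.UnmarshalJSON above is typical of the hand-rolled decoders added in this change: scalar fields are first decoded into an interface{} and then normalised, so a boolean that arrives as the string "true" is still accepted. A small sketch under that assumption, with a hypothetical payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "ignore_failure" is sent as a string here; the generated decoder
	// falls back to strconv.ParseBool for that case.
	raw := []byte(`{"ignore_failure": "true", "if": "ctx.error != null"}`)

	var p types.DropProcessor
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.IgnoreFailure != nil && *p.IgnoreFailure) // expected: true
}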
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,5 +24,5 @@ package types // // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L52-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L52-L58 type Duration interface{} diff --git a/typedapi/types/durationvalueunitfloatmillis.go b/typedapi/types/durationvalueunitfloatmillis.go old mode 100755 new mode 100644 index c832d1f783..b0dc8b3485 --- a/typedapi/types/durationvalueunitfloatmillis.go +++ b/typedapi/types/durationvalueunitfloatmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DurationValueUnitFloatMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L67-L67 type DurationValueUnitFloatMillis Float64 diff --git a/typedapi/types/durationvalueunitmillis.go b/typedapi/types/durationvalueunitmillis.go old mode 100755 new mode 100644 index 86bbec60ee..087c92481c --- a/typedapi/types/durationvalueunitmillis.go +++ b/typedapi/types/durationvalueunitmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DurationValueUnitMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L67-L67 type DurationValueUnitMillis int64 diff --git a/typedapi/types/durationvalueunitnanos.go b/typedapi/types/durationvalueunitnanos.go old mode 100755 new mode 100644 index 0b2cecf0f0..d741e8ba75 --- a/typedapi/types/durationvalueunitnanos.go +++ b/typedapi/types/durationvalueunitnanos.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DurationValueUnitNanos type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L67-L67 type DurationValueUnitNanos int64 diff --git a/typedapi/types/durationvalueunitseconds.go b/typedapi/types/durationvalueunitseconds.go old mode 100755 new mode 100644 index 4e15c8b04c..2d71d44c44 --- a/typedapi/types/durationvalueunitseconds.go +++ b/typedapi/types/durationvalueunitseconds.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // DurationValueUnitSeconds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L67-L67 type DurationValueUnitSeconds int64 diff --git a/typedapi/types/dutchanalyzer.go b/typedapi/types/dutchanalyzer.go old mode 100755 new mode 100644 index 54f98151b2..3275e2d527 --- a/typedapi/types/dutchanalyzer.go +++ b/typedapi/types/dutchanalyzer.go @@ -16,18 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // DutchAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L61-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L61-L64 type DutchAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` } +func (s *DutchAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewDutchAnalyzer returns a DutchAnalyzer. func NewDutchAnalyzer() *DutchAnalyzer { r := &DutchAnalyzer{} diff --git a/typedapi/types/dynamicproperty.go b/typedapi/types/dynamicproperty.go old mode 100755 new mode 100644 index b33d49acf9..489a54905e --- a/typedapi/types/dynamicproperty.go +++ b/typedapi/types/dynamicproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
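Editor's note: DutchAnalyzer.UnmarshalJSON above shows the other recurring pattern in this diff: fields that may be either a single string or an array are sniffed by checking whether the raw message starts with "[", and the single-string form is appended to the slice. A minimal sketch, assuming the types package from this change:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	single := []byte(`{"type": "dutch", "stopwords": "_dutch_"}`)
	list := []byte(`{"type": "dutch", "stopwords": ["de", "het", "een"]}`)

	var a, b types.DutchAnalyzer
	if err := json.Unmarshal(single, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(list, &b); err != nil {
		panic(err)
	}
	// Both forms should end up in the []string Stopwords field.
	fmt.Println(a.Stopwords, b.Stopwords)
}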
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -31,12 +31,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // DynamicProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L275-L306 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L275-L306 type DynamicProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -74,6 +76,7 @@ type DynamicProperty struct { } func (s *DynamicProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -88,28 +91,71 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { switch t { case "analyzer": - if err := dec.Decode(&s.Analyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Analyzer = &o case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -118,16 +164,37 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - if err := dec.Decode(&s.EagerGlobalOrdinals); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EagerGlobalOrdinals = &value + case bool: + s.EagerGlobalOrdinals = &v } case "enabled": - if err := dec.Decode(&s.Enabled); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -415,30 +482,62 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := 
dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "index_options": @@ -447,8 +546,17 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "index_phrases": - if err := dec.Decode(&s.IndexPhrases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IndexPhrases = &value + case bool: + s.IndexPhrases = &v } case "index_prefixes": @@ -457,18 +565,33 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "locale": - if err := dec.Decode(&s.Locale); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Locale = &o case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "norms": - if err := dec.Decode(&s.Norms); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Norms = &value + case bool: + s.Norms = &v } case "null_value": @@ -482,16 +605,41 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "position_increment_gap": - if err := dec.Decode(&s.PositionIncrementGap); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PositionIncrementGap = &value + case float64: + f := int(v) + s.PositionIncrementGap = &f } case "precision_step": - if err := dec.Decode(&s.PrecisionStep); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrecisionStep = &value + case float64: + f := int(v) + s.PrecisionStep = &f } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -779,7 +927,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -791,23 +939,41 @@ func (s 
*DynamicProperty) UnmarshalJSON(data []byte) error { } case "search_analyzer": - if err := dec.Decode(&s.SearchAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchAnalyzer = &o case "search_quote_analyzer": - if err := dec.Decode(&s.SearchQuoteAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchQuoteAnalyzer = &o case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "term_vector": diff --git a/typedapi/types/dynamictemplate.go b/typedapi/types/dynamictemplate.go old mode 100755 new mode 100644 index a006c9ecfd..464b909c5c --- a/typedapi/types/dynamictemplate.go +++ b/typedapi/types/dynamictemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,7 +32,7 @@ import ( // DynamicTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/dynamic-template.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/dynamic-template.ts#L22-L30 type DynamicTemplate struct { Mapping Property `json:"mapping,omitempty"` Match *string `json:"match,omitempty"` @@ -44,6 +44,7 @@ type DynamicTemplate struct { } func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -71,295 +72,301 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { case "binary": o := NewBinaryProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "boolean": o := NewBooleanProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "{dynamic_property}": o := NewDynamicProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "join": o := NewJoinProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "keyword": o := NewKeywordProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "match_only_text": o := NewMatchOnlyTextProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "percolator": o := NewPercolatorProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "rank_feature": o := NewRankFeatureProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != 
nil { return err } s.Mapping = *o case "rank_features": o := NewRankFeaturesProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "search_as_you_type": o := NewSearchAsYouTypeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "text": o := NewTextProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "version": o := NewVersionProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "wildcard": o := NewWildcardProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "date_nanos": o := NewDateNanosProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "date": o := NewDateProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "aggregate_metric_double": o := NewAggregateMetricDoubleProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "dense_vector": o := NewDenseVectorProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "flattened": o := NewFlattenedProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "nested": o := NewNestedProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "object": o := NewObjectProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "completion": o := NewCompletionProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "constant_keyword": o := NewConstantKeywordProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "alias": o := NewFieldAliasProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "histogram": o := NewHistogramProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "ip": o := NewIpProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "murmur3": o := NewMurmur3HashProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "token_count": o := NewTokenCountProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "geo_point": o := NewGeoPointProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "geo_shape": o := NewGeoShapeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "point": o := NewPointProperty() - 
if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "shape": o := NewShapeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "byte": o := NewByteNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "double": o := NewDoubleNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "float": o := NewFloatNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "half_float": o := NewHalfFloatNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "integer": o := NewIntegerNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "long": o := NewLongNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "scaled_float": o := NewScaledFloatNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "short": o := NewShortNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "unsigned_long": o := NewUnsignedLongNumberProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "date_range": o := NewDateRangeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "double_range": o := NewDoubleRangeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "float_range": o := NewFloatRangeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "integer_range": o := NewIntegerRangeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "ip_range": o := NewIpRangeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o case "long_range": o := NewLongRangeProperty() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Mapping = *o default: - if err := dec.Decode(&s.Mapping); err != nil { + if err := localDec.Decode(&s.Mapping); err != nil { return err } } case "match": - if err := dec.Decode(&s.Match); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Match = &o case "match_mapping_type": - if err := dec.Decode(&s.MatchMappingType); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.MatchMappingType = &o case "match_pattern": if err := dec.Decode(&s.MatchPattern); err != nil { @@ -367,19 +374,28 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { } case "path_match": - if err := dec.Decode(&s.PathMatch); err != nil { + var tmp json.RawMessage 
+ if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.PathMatch = &o case "path_unmatch": - if err := dec.Decode(&s.PathUnmatch); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.PathUnmatch = &o case "unmatch": - if err := dec.Decode(&s.Unmatch); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Unmatch = &o } } diff --git a/typedapi/types/edgengramtokenfilter.go b/typedapi/types/edgengramtokenfilter.go old mode 100755 new mode 100644 index cb0dc24dbb..0bb370ddd3 --- a/typedapi/types/edgengramtokenfilter.go +++ b/typedapi/types/edgengramtokenfilter.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/edgengramside" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // EdgeNGramTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L79-L85 type EdgeNGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` @@ -36,6 +44,87 @@ type EdgeNGramTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *EdgeNGramTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxGram = &value + case float64: + f := int(v) + s.MaxGram = &f + } + + case "min_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinGram = &value + case float64: + f := int(v) + s.MinGram = &f + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "side": + if err := dec.Decode(&s.Side); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewEdgeNGramTokenFilter returns a EdgeNGramTokenFilter. func NewEdgeNGramTokenFilter() *EdgeNGramTokenFilter { r := &EdgeNGramTokenFilter{} diff --git a/typedapi/types/edgengramtokenizer.go b/typedapi/types/edgengramtokenizer.go old mode 100755 new mode 100644 index 6718aaa9dd..52be23d38c --- a/typedapi/types/edgengramtokenizer.go +++ b/typedapi/types/edgengramtokenizer.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
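Editor's note: EdgeNGramTokenFilter.UnmarshalJSON above applies the same normalisation to numeric settings: min_gram and max_gram may arrive as JSON numbers or as quoted strings, and both paths converge on an int via strconv.Atoi or a float64 conversion. A sketch under that assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// min_gram as a number, max_gram as a quoted string; both should
	// decode into the *int fields of the filter.
	raw := []byte(`{"type": "edge_ngram", "min_gram": 1, "max_gram": "20"}`)

	var f types.EdgeNGramTokenFilter
	if err := json.Unmarshal(raw, &f); err != nil {
		panic(err)
	}
	fmt.Println(*f.MinGram, *f.MaxGram) // expected: 1 20
}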
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tokenchar" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // EdgeNGramTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L30-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L30-L36 type EdgeNGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` @@ -36,6 +44,81 @@ type EdgeNGramTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "custom_token_chars": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CustomTokenChars = &o + + case "max_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxGram = value + case float64: + f := int(v) + s.MaxGram = f + } + + case "min_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinGram = value + case float64: + f := int(v) + s.MinGram = f + } + + case "token_chars": + if err := dec.Decode(&s.TokenChars); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewEdgeNGramTokenizer returns a EdgeNGramTokenizer. func NewEdgeNGramTokenizer() *EdgeNGramTokenizer { r := &EdgeNGramTokenizer{} diff --git a/typedapi/types/elasticsearchversioninfo.go b/typedapi/types/elasticsearchversioninfo.go old mode 100755 new mode 100644 index 6d7db05f21..9067884059 --- a/typedapi/types/elasticsearchversioninfo.go +++ b/typedapi/types/elasticsearchversioninfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ElasticsearchVersionInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Base.ts#L54-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Base.ts#L54-L64 type ElasticsearchVersionInfo struct { BuildDate DateTime `json:"build_date"` BuildFlavor string `json:"build_flavor"` @@ -35,6 +45,92 @@ type ElasticsearchVersionInfo struct { MinimumWireCompatibilityVersion string `json:"minimum_wire_compatibility_version"` } +func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "build_date": + if err := dec.Decode(&s.BuildDate); err != nil { + return err + } + + case "build_flavor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BuildFlavor = o + + case "build_hash": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BuildHash = o + + case "build_snapshot": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.BuildSnapshot = value + case bool: + s.BuildSnapshot = v + } + + case "build_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BuildType = o + + case "number": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Int = o + + case "lucene_version": + if err := dec.Decode(&s.LuceneVersion); err != nil { + return err + } + + case "minimum_index_compatibility_version": + if err := dec.Decode(&s.MinimumIndexCompatibilityVersion); err != nil { + return err + } + + case "minimum_wire_compatibility_version": + if err := dec.Decode(&s.MinimumWireCompatibilityVersion); err != nil { + return err + } + + } + } + return nil +} + // NewElasticsearchVersionInfo returns a ElasticsearchVersionInfo. func NewElasticsearchVersionInfo() *ElasticsearchVersionInfo { r := &ElasticsearchVersionInfo{} diff --git a/typedapi/types/elisiontokenfilter.go b/typedapi/types/elisiontokenfilter.go old mode 100755 new mode 100644 index b4c9b8a093..28bac0aa23 --- a/typedapi/types/elisiontokenfilter.go +++ b/typedapi/types/elisiontokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ElisionTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L186-L191 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L187-L192 type ElisionTokenFilter struct { Articles []string `json:"articles,omitempty"` ArticlesCase *bool `json:"articles_case,omitempty"` @@ -31,6 +41,63 @@ type ElisionTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *ElisionTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "articles": + if err := dec.Decode(&s.Articles); err != nil { + return err + } + + case "articles_case": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ArticlesCase = &value + case bool: + s.ArticlesCase = &v + } + + case "articles_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ArticlesPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewElisionTokenFilter returns a ElisionTokenFilter. func NewElisionTokenFilter() *ElisionTokenFilter { r := &ElisionTokenFilter{} diff --git a/typedapi/types/email.go b/typedapi/types/email.go old mode 100755 new mode 100644 index 18b2a923b6..c248da97fb --- a/typedapi/types/email.go +++ b/typedapi/types/email.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/emailpriority" + + "bytes" + "errors" + "io" + + "encoding/json" ) // Email type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L238-L250 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L238-L250 type Email struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` @@ -41,6 +47,90 @@ type Email struct { To []string `json:"to"` } +func (s *Email) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attachments": + if s.Attachments == nil { + s.Attachments = make(map[string]EmailAttachmentContainer, 0) + } + if err := dec.Decode(&s.Attachments); err != nil { + return err + } + + case "bcc": + if err := dec.Decode(&s.Bcc); err != nil { + return err + } + + case "body": + if err := dec.Decode(&s.Body); err != nil { + return err + } + + case "cc": + if err := dec.Decode(&s.Cc); err != nil { + return err + } + + case "from": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.From = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "priority": + if err := dec.Decode(&s.Priority); err != nil { + return err + } + + case "reply_to": + if err := dec.Decode(&s.ReplyTo); err != nil { + return err + } + + case "sent_date": + if err := dec.Decode(&s.SentDate); err != nil { + return err + } + + case "subject": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Subject = o + + case "to": + if err := dec.Decode(&s.To); err != nil { + return err + } + + } + } + return nil +} + // NewEmail returns a Email. func NewEmail() *Email { r := &Email{ diff --git a/typedapi/types/emailaction.go b/typedapi/types/emailaction.go old mode 100755 new mode 100644 index 10753d260e..fc649d61dc --- a/typedapi/types/emailaction.go +++ b/typedapi/types/emailaction.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/emailpriority" + + "bytes" + "errors" + "io" + + "encoding/json" ) // EmailAction type. 
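Editor's note: Email.UnmarshalJSON above also guards map-valued fields: the attachments map is allocated lazily before decoding into it, so it is non-nil whenever the field is present in the payload. A small sketch with a hypothetical watcher email body:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"to": ["ops@example.com"], "subject": "report", "attachments": {"data.csv": {}}}`)

	var e types.Email
	if err := json.Unmarshal(raw, &e); err != nil {
		panic(err)
	}
	// The attachments map should be allocated and keyed by attachment name.
	_, ok := e.Attachments["data.csv"]
	fmt.Println(ok, e.To)
}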
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L252-L252 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L252-L252 type EmailAction struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` @@ -41,6 +47,90 @@ type EmailAction struct { To []string `json:"to"` } +func (s *EmailAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attachments": + if s.Attachments == nil { + s.Attachments = make(map[string]EmailAttachmentContainer, 0) + } + if err := dec.Decode(&s.Attachments); err != nil { + return err + } + + case "bcc": + if err := dec.Decode(&s.Bcc); err != nil { + return err + } + + case "body": + if err := dec.Decode(&s.Body); err != nil { + return err + } + + case "cc": + if err := dec.Decode(&s.Cc); err != nil { + return err + } + + case "from": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.From = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "priority": + if err := dec.Decode(&s.Priority); err != nil { + return err + } + + case "reply_to": + if err := dec.Decode(&s.ReplyTo); err != nil { + return err + } + + case "sent_date": + if err := dec.Decode(&s.SentDate); err != nil { + return err + } + + case "subject": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Subject = o + + case "to": + if err := dec.Decode(&s.To); err != nil { + return err + } + + } + } + return nil +} + // NewEmailAction returns a EmailAction. func NewEmailAction() *EmailAction { r := &EmailAction{ diff --git a/typedapi/types/emailattachmentcontainer.go b/typedapi/types/emailattachmentcontainer.go old mode 100755 new mode 100644 index 7ffc16ca77..4bb512ada4 --- a/typedapi/types/emailattachmentcontainer.go +++ b/typedapi/types/emailattachmentcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EmailAttachmentContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L211-L216 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L211-L216 type EmailAttachmentContainer struct { Data *DataEmailAttachment `json:"data,omitempty"` Http *HttpEmailAttachment `json:"http,omitempty"` diff --git a/typedapi/types/emailbody.go b/typedapi/types/emailbody.go old mode 100755 new mode 100644 index 257332e548..fda158fcfc --- a/typedapi/types/emailbody.go +++ b/typedapi/types/emailbody.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EmailBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L192-L195 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L192-L195 type EmailBody struct { Html *string `json:"html,omitempty"` Text *string `json:"text,omitempty"` diff --git a/typedapi/types/emailresult.go b/typedapi/types/emailresult.go old mode 100755 new mode 100644 index f8aba9461f..0247633da4 --- a/typedapi/types/emailresult.go +++ b/typedapi/types/emailresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EmailResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L205-L209 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L205-L209 type EmailResult struct { Account *string `json:"account,omitempty"` Message Email `json:"message"` diff --git a/typedapi/types/emptyobject.go b/typedapi/types/emptyobject.go old mode 100755 new mode 100644 index 2d6496ea40..6427bcfd46 --- a/typedapi/types/emptyobject.go +++ b/typedapi/types/emptyobject.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EmptyObject type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L140-L141 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L140-L141 type EmptyObject struct { } diff --git a/typedapi/types/enrichpolicy.go b/typedapi/types/enrichpolicy.go old mode 100755 new mode 100644 index 1435b28971..4094ee902a --- a/typedapi/types/enrichpolicy.go +++ b/typedapi/types/enrichpolicy.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // EnrichPolicy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/_types/Policy.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/_types/Policy.ts#L33-L40 type EnrichPolicy struct { ElasticsearchVersion *string `json:"elasticsearch_version,omitempty"` EnrichFields []string `json:"enrich_fields"` @@ -32,6 +40,84 @@ type EnrichPolicy struct { Query *string `json:"query,omitempty"` } +func (s *EnrichPolicy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "elasticsearch_version": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ElasticsearchVersion = &o + + case "enrich_fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.EnrichFields = append(s.EnrichFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.EnrichFields); err != nil { + return err + } + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "match_field": + if err := dec.Decode(&s.MatchField); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = &o + + } + } + return nil +} + // NewEnrichPolicy returns a EnrichPolicy. func NewEnrichPolicy() *EnrichPolicy { r := &EnrichPolicy{} diff --git a/typedapi/types/enrichprocessor.go b/typedapi/types/enrichprocessor.go old mode 100755 new mode 100644 index e789390354..2f47d6afab --- a/typedapi/types/enrichprocessor.go +++ b/typedapi/types/enrichprocessor.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geoshaperelation" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // EnrichProcessor type. 
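
The hunk above gives EnrichPolicy a hand-written UnmarshalJSON whose main job is to tolerate fields such as enrich_fields and indices arriving either as a single string or as an array of strings. A minimal sketch of what that buys a caller, assuming only the github.com/elastic/go-elasticsearch/v8/typedapi/types module path already used by the imports in this diff (index and policy names are invented for illustration):

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        // "indices" is a bare string here; the generated decoder wraps it
        // into a one-element slice instead of failing to unmarshal.
        raw := []byte(`{"indices":"users","enrich_fields":["email","city"],"match_field":"user_id","name":"users-policy"}`)

        var p types.EnrichPolicy
        if err := json.Unmarshal(raw, &p); err != nil {
            panic(err)
        }
        fmt.Println(p.Indices, p.EnrichFields) // [users] [email city]
    }
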
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L201-L209 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L201-L209 type EnrichProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -42,6 +50,136 @@ type EnrichProcessor struct { TargetField string `json:"target_field"` } +func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "max_matches": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxMatches = &value + case float64: + f := int(v) + s.MaxMatches = &f + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "override": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Override = &value + case bool: + s.Override = &v + } + + case "policy_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PolicyName = o + + case "shape_relation": + if err := dec.Decode(&s.ShapeRelation); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewEnrichProcessor returns a EnrichProcessor. func NewEnrichProcessor() *EnrichProcessor { r := &EnrichProcessor{} diff --git a/typedapi/types/ensemble.go b/typedapi/types/ensemble.go old mode 100755 new mode 100644 index 6c5eec633d..1387c158fd --- a/typedapi/types/ensemble.go +++ b/typedapi/types/ensemble.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Ensemble type. 
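
The EnrichProcessor decoder above goes a step further and also coerces loosely typed scalars: ignore_failure, ignore_missing and override accept a bool or its string form, and max_matches accepts a number or a numeric string, with strconv doing the parsing. A hedged sketch under the same module-path assumption (field values again invented):

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        // ignore_missing and max_matches are sent as strings; the decoder
        // runs them through strconv.ParseBool and strconv.Atoi.
        raw := []byte(`{"field":"ip","policy_name":"geo-policy","target_field":"geo","ignore_missing":"true","max_matches":"3"}`)

        var proc types.EnrichProcessor
        if err := json.Unmarshal(raw, &proc); err != nil {
            panic(err)
        }
        fmt.Println(*proc.IgnoreMissing, *proc.MaxMatches) // true 3
    }
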
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L93-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L93-L99 type Ensemble struct { AggregateOutput *AggregateOutput `json:"aggregate_output,omitempty"` ClassificationLabels []string `json:"classification_labels,omitempty"` diff --git a/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go b/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go old mode 100755 new mode 100644 index 7311a078d2..d5c7388f7b --- a/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go +++ b/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package accesstokengranttype package accesstokengranttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_token/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_token/types.ts#L23-L28 type AccessTokenGrantType struct { Name string } @@ -43,7 +43,7 @@ func (a AccessTokenGrantType) MarshalText() (text []byte, err error) { } func (a *AccessTokenGrantType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "password": *a = Password diff --git a/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go b/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go old mode 100755 new mode 100644 index 71f4370b7d..efc97a557e --- a/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go +++ b/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package acknowledgementoptions package acknowledgementoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L106-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L106-L110 type AcknowledgementOptions struct { Name string } @@ -41,7 +41,7 @@ func (a AcknowledgementOptions) MarshalText() (text []byte, err error) { } func (a *AcknowledgementOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "awaits_successful_execution": *a = Awaitssuccessfulexecution diff --git a/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go b/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go old mode 100755 new mode 100644 index f193d29c99..5cf800f18e --- a/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go +++ b/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package actionexecutionmode package actionexecutionmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L70-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L70-L91 type ActionExecutionMode struct { Name string } @@ -45,7 +45,7 @@ func (a ActionExecutionMode) MarshalText() (text []byte, err error) { } func (a *ActionExecutionMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "simulate": *a = Simulate diff --git a/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go b/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go old mode 100755 new mode 100644 index 5b6e81106f..95b7bdbf65 --- a/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go +++ b/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package actionstatusoptions package actionstatusoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L99-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L99-L104 type ActionStatusOptions struct { Name string } @@ -43,7 +43,7 @@ func (a ActionStatusOptions) MarshalText() (text []byte, err error) { } func (a *ActionStatusOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "success": *a = Success diff --git a/typedapi/types/enums/actiontype/actiontype.go b/typedapi/types/enums/actiontype/actiontype.go old mode 100755 new mode 100644 index 333a0e4b34..2bbc609062 --- a/typedapi/types/enums/actiontype/actiontype.go +++ b/typedapi/types/enums/actiontype/actiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package actiontype package actiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L61-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L61-L68 type ActionType struct { Name string } @@ -47,7 +47,7 @@ func (a ActionType) MarshalText() (text []byte, err error) { } func (a *ActionType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "email": *a = Email diff --git a/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go b/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go old mode 100755 new mode 100644 index 527adba692..437532b1f0 --- a/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go +++ b/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package allocationexplaindecision package allocationexplaindecision import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L32-L37 type AllocationExplainDecision struct { Name string } @@ -43,7 +43,7 @@ func (a AllocationExplainDecision) MarshalText() (text []byte, err error) { } func (a *AllocationExplainDecision) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "NO": *a = NO diff --git a/typedapi/types/enums/apikeygranttype/apikeygranttype.go b/typedapi/types/enums/apikeygranttype/apikeygranttype.go old mode 100755 new mode 100644 index d1859c9fc1..eb564743b9 --- a/typedapi/types/enums/apikeygranttype/apikeygranttype.go +++ b/typedapi/types/enums/apikeygranttype/apikeygranttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package apikeygranttype package apikeygranttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/grant_api_key/types.ts#L34-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/grant_api_key/types.ts#L34-L37 type ApiKeyGrantType struct { Name string } @@ -39,7 +39,7 @@ func (a ApiKeyGrantType) MarshalText() (text []byte, err error) { } func (a *ApiKeyGrantType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "access_token": *a = Accesstoken diff --git a/typedapi/types/enums/appliesto/appliesto.go b/typedapi/types/enums/appliesto/appliesto.go old mode 100755 new mode 100644 index 45f2ec593d..8f19f1e665 --- a/typedapi/types/enums/appliesto/appliesto.go +++ b/typedapi/types/enums/appliesto/appliesto.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package appliesto package appliesto import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Rule.ts#L67-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Rule.ts#L67-L72 type AppliesTo struct { Name string } @@ -43,7 +43,7 @@ func (a AppliesTo) MarshalText() (text []byte, err error) { } func (a *AppliesTo) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "actual": *a = Actual diff --git a/typedapi/types/enums/boundaryscanner/boundaryscanner.go b/typedapi/types/enums/boundaryscanner/boundaryscanner.go old mode 100755 new mode 100644 index 0222957ab2..9a2a41fdf0 --- a/typedapi/types/enums/boundaryscanner/boundaryscanner.go +++ b/typedapi/types/enums/boundaryscanner/boundaryscanner.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package boundaryscanner package boundaryscanner import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L27-L31 type BoundaryScanner struct { Name string } @@ -41,7 +41,7 @@ func (b BoundaryScanner) MarshalText() (text []byte, err error) { } func (b *BoundaryScanner) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "chars": *b = Chars diff --git a/typedapi/types/enums/bytes/bytes.go b/typedapi/types/enums/bytes/bytes.go old mode 100755 new mode 100644 index 7b5f13d2d3..c6e3093570 --- a/typedapi/types/enums/bytes/bytes.go +++ b/typedapi/types/enums/bytes/bytes.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package bytes package bytes import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L149-L167 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L149-L167 type Bytes struct { Name string } @@ -47,7 +47,7 @@ func (b Bytes) MarshalText() (text []byte, err error) { } func (b *Bytes) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "b": *b = B diff --git a/typedapi/types/enums/calendarinterval/calendarinterval.go b/typedapi/types/enums/calendarinterval/calendarinterval.go old mode 100755 new mode 100644 index 29967e69f1..dba9fc9e6c --- a/typedapi/types/enums/calendarinterval/calendarinterval.go +++ b/typedapi/types/enums/calendarinterval/calendarinterval.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package calendarinterval package calendarinterval import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L112-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L112-L129 type CalendarInterval struct { Name string } @@ -51,7 +51,7 @@ func (c CalendarInterval) MarshalText() (text []byte, err error) { } func (c *CalendarInterval) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "second": *c = Second diff --git a/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go b/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go old mode 100755 new mode 100644 index 74b03c706a..1298152f93 --- a/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go +++ b/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package cardinalityexecutionmode package cardinalityexecutionmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L54-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L54-L60 type CardinalityExecutionMode struct { Name string } @@ -45,7 +45,7 @@ func (c CardinalityExecutionMode) MarshalText() (text []byte, err error) { } func (c *CardinalityExecutionMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "global_ordinals": *c = Globalordinals diff --git a/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go b/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go old mode 100755 new mode 100644 index 2a2fdb5e77..3ad2108d0a --- a/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go +++ b/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package catanomalydetectorcolumn package catanomalydetectorcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L32-L401 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L32-L401 type CatAnomalyDetectorColumn struct { Name string } @@ -155,7 +155,7 @@ func (c CatAnomalyDetectorColumn) MarshalText() (text []byte, err error) { } func (c *CatAnomalyDetectorColumn) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "assignment_explanation": *c = Assignmentexplanation diff --git a/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go b/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go old mode 100755 new mode 100644 index 4fc5442483..2d4db6e5a7 --- a/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go +++ b/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package catdatafeedcolumn package catdatafeedcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L405-L471 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L405-L471 type CatDatafeedColumn struct { Name string } @@ -59,7 +59,7 @@ func (c CatDatafeedColumn) MarshalText() (text []byte, err error) { } func (c *CatDatafeedColumn) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "ae": *c = Ae diff --git a/typedapi/types/enums/catdfacolumn/catdfacolumn.go b/typedapi/types/enums/catdfacolumn/catdfacolumn.go old mode 100755 new mode 100644 index ca19bab2a3..469a2423be --- a/typedapi/types/enums/catdfacolumn/catdfacolumn.go +++ b/typedapi/types/enums/catdfacolumn/catdfacolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package catdfacolumn package catdfacolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L472-L557 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L472-L557 type CatDfaColumn struct { Name string } @@ -67,7 +67,7 @@ func (c CatDfaColumn) MarshalText() (text []byte, err error) { } func (c *CatDfaColumn) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "assignment_explanation": *c = Assignmentexplanation diff --git a/typedapi/types/enums/categorizationstatus/categorizationstatus.go b/typedapi/types/enums/categorizationstatus/categorizationstatus.go old mode 100755 new mode 100644 index b8fdec49c7..16b8a27ca4 --- a/typedapi/types/enums/categorizationstatus/categorizationstatus.go +++ b/typedapi/types/enums/categorizationstatus/categorizationstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package categorizationstatus package categorizationstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Model.ts#L80-L83 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Model.ts#L80-L83 type CategorizationStatus struct { Name string } @@ -39,7 +39,7 @@ func (c CategorizationStatus) MarshalText() (text []byte, err error) { } func (c *CategorizationStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "ok": *c = Ok diff --git a/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go b/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go old mode 100755 new mode 100644 index 53b17d25b0..711420f68b --- a/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go +++ b/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package cattrainedmodelscolumn package cattrainedmodelscolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L561-L635 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L561-L635 type CatTrainedModelsColumn struct { Name string } @@ -63,7 +63,7 @@ func (c CatTrainedModelsColumn) MarshalText() (text []byte, err error) { } func (c *CatTrainedModelsColumn) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "create_time": *c = Createtime diff --git a/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go b/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go old mode 100755 new mode 100644 index 3e39bedbe1..d5a46c594d --- a/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go +++ b/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package cattransformcolumn package cattransformcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/_types/CatBase.ts#L640-L844 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/_types/CatBase.ts#L640-L844 type CatTransformColumn struct { Name string } @@ -101,7 +101,7 @@ func (c CatTransformColumn) MarshalText() (text []byte, err error) { } func (c *CatTransformColumn) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "changes_last_detection_time": *c = Changeslastdetectiontime diff --git a/typedapi/types/enums/childscoremode/childscoremode.go b/typedapi/types/enums/childscoremode/childscoremode.go old mode 100755 new mode 100644 index 9f0470af9c..bc6b297814 --- a/typedapi/types/enums/childscoremode/childscoremode.go +++ b/typedapi/types/enums/childscoremode/childscoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package childscoremode package childscoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/joining.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/joining.ts#L25-L39 type ChildScoreMode struct { Name string } @@ -45,7 +45,7 @@ func (c ChildScoreMode) MarshalText() (text []byte, err error) { } func (c *ChildScoreMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "none": *c = None diff --git a/typedapi/types/enums/chunkingmode/chunkingmode.go b/typedapi/types/enums/chunkingmode/chunkingmode.go old mode 100755 new mode 100644 index 521bfc8c70..9a489c6266 --- a/typedapi/types/enums/chunkingmode/chunkingmode.go +++ b/typedapi/types/enums/chunkingmode/chunkingmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package chunkingmode package chunkingmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L171-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L171-L175 type ChunkingMode struct { Name string } @@ -41,7 +41,7 @@ func (c ChunkingMode) MarshalText() (text []byte, err error) { } func (c *ChunkingMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "auto": *c = Auto diff --git a/typedapi/types/enums/clusterprivilege/clusterprivilege.go b/typedapi/types/enums/clusterprivilege/clusterprivilege.go old mode 100755 new mode 100644 index 18fc00f94d..eff0af3ad5 --- a/typedapi/types/enums/clusterprivilege/clusterprivilege.go +++ b/typedapi/types/enums/clusterprivilege/clusterprivilege.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package clusterprivilege package clusterprivilege import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L41-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L41-L80 type ClusterPrivilege struct { Name string } @@ -109,7 +109,7 @@ func (c ClusterPrivilege) MarshalText() (text []byte, err error) { } func (c *ClusterPrivilege) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *c = All diff --git a/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go b/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go old mode 100755 new mode 100644 index 74d2e73d0e..c7dc2fd6eb --- a/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go +++ b/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package combinedfieldsoperator package combinedfieldsoperator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L202-L205 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L202-L205 type CombinedFieldsOperator struct { Name string } @@ -39,7 +39,7 @@ func (c CombinedFieldsOperator) MarshalText() (text []byte, err error) { } func (c *CombinedFieldsOperator) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "or": *c = Or diff --git a/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go b/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go old mode 100755 new mode 100644 index 2337cbc975..db286765a6 --- a/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go +++ b/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package combinedfieldszeroterms package combinedfieldszeroterms import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L207-L210 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L207-L210 type CombinedFieldsZeroTerms struct { Name string } @@ -39,7 +39,7 @@ func (c CombinedFieldsZeroTerms) MarshalText() (text []byte, err error) { } func (c *CombinedFieldsZeroTerms) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "none": *c = None diff --git a/typedapi/types/enums/conditionop/conditionop.go b/typedapi/types/enums/conditionop/conditionop.go old mode 100755 new mode 100644 index 06cb1a1bc0..c42cf986bc --- a/typedapi/types/enums/conditionop/conditionop.go +++ b/typedapi/types/enums/conditionop/conditionop.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package conditionop package conditionop import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L38-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L38-L45 type ConditionOp struct { Name string } @@ -47,7 +47,7 @@ func (c ConditionOp) MarshalText() (text []byte, err error) { } func (c *ConditionOp) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "not_eq": *c = Noteq diff --git a/typedapi/types/enums/conditionoperator/conditionoperator.go b/typedapi/types/enums/conditionoperator/conditionoperator.go old mode 100755 new mode 100644 index cca5220401..269222bc35 --- a/typedapi/types/enums/conditionoperator/conditionoperator.go +++ b/typedapi/types/enums/conditionoperator/conditionoperator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package conditionoperator package conditionoperator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Rule.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Rule.ts#L74-L79 type ConditionOperator struct { Name string } @@ -43,7 +43,7 @@ func (c ConditionOperator) MarshalText() (text []byte, err error) { } func (c *ConditionOperator) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "gt": *c = Gt diff --git a/typedapi/types/enums/conditiontype/conditiontype.go b/typedapi/types/enums/conditiontype/conditiontype.go old mode 100755 new mode 100644 index a4615c17f4..342a1f0f97 --- a/typedapi/types/enums/conditiontype/conditiontype.go +++ b/typedapi/types/enums/conditiontype/conditiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package conditiontype package conditiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L61-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L61-L67 type ConditionType struct { Name string } @@ -45,7 +45,7 @@ func (c ConditionType) MarshalText() (text []byte, err error) { } func (c *ConditionType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "always": *c = Always diff --git a/typedapi/types/enums/conflicts/conflicts.go b/typedapi/types/enums/conflicts/conflicts.go old mode 100755 new mode 100644 index 4d454a140d..9eb67865f2 --- a/typedapi/types/enums/conflicts/conflicts.go +++ b/typedapi/types/enums/conflicts/conflicts.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package conflicts package conflicts import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L169-L172 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L169-L172 type Conflicts struct { Name string } @@ -39,7 +39,7 @@ func (c Conflicts) MarshalText() (text []byte, err error) { } func (c *Conflicts) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "abort": *c = Abort diff --git a/typedapi/types/enums/connectionscheme/connectionscheme.go b/typedapi/types/enums/connectionscheme/connectionscheme.go old mode 100755 new mode 100644 index 1d4c42a567..9d72b9303d --- a/typedapi/types/enums/connectionscheme/connectionscheme.go +++ b/typedapi/types/enums/connectionscheme/connectionscheme.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
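
Every enum package touched in this diff carries the same one-line change to UnmarshalText: the incoming text is lower-cased and then stripped of double-quote characters before the switch, so a value that still carries its JSON quotes resolves to the same constant as the bare spelling. A small sketch against the conflicts enum from the hunk above, with the same module-path assumption:

    package main

    import (
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/conflicts"
    )

    func main() {
        var c conflicts.Conflicts

        // Bare text, as before this change.
        if err := c.UnmarshalText([]byte("abort")); err != nil {
            panic(err)
        }
        fmt.Println(c == conflicts.Abort) // true

        // Text that still carries its JSON quotes now matches as well,
        // thanks to the added strings.ReplaceAll(..., "\"", "").
        if err := c.UnmarshalText([]byte(`"ABORT"`)); err != nil {
            panic(err)
        }
        fmt.Println(c == conflicts.Abort) // true
    }
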
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package connectionscheme package connectionscheme import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L39-L42 type ConnectionScheme struct { Name string } @@ -39,7 +39,7 @@ func (c ConnectionScheme) MarshalText() (text []byte, err error) { } func (c *ConnectionScheme) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "http": *c = Http diff --git a/typedapi/types/enums/converttype/converttype.go b/typedapi/types/enums/converttype/converttype.go old mode 100755 new mode 100644 index 78c42a31f1..a94cbbf772 --- a/typedapi/types/enums/converttype/converttype.go +++ b/typedapi/types/enums/converttype/converttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package converttype package converttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L137-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L137-L145 type ConvertType struct { Name string } @@ -49,7 +49,7 @@ func (c ConvertType) MarshalText() (text []byte, err error) { } func (c *ConvertType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "integer": *c = Integer diff --git a/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go b/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go old mode 100755 new mode 100644 index 1a08d99feb..994fe6c9dc --- a/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go +++ b/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package dataattachmentformat package dataattachmentformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L187-L190 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L187-L190 type DataAttachmentFormat struct { Name string } @@ -39,7 +39,7 @@ func (d DataAttachmentFormat) MarshalText() (text []byte, err error) { } func (d *DataAttachmentFormat) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "json": *d = Json diff --git a/typedapi/types/enums/datafeedstate/datafeedstate.go b/typedapi/types/enums/datafeedstate/datafeedstate.go old mode 100755 new mode 100644 index b33cd2d517..e2214026f2 --- a/typedapi/types/enums/datafeedstate/datafeedstate.go +++ b/typedapi/types/enums/datafeedstate/datafeedstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package datafeedstate package datafeedstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L133-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L133-L138 type DatafeedState struct { Name string } @@ -43,7 +43,7 @@ func (d DatafeedState) MarshalText() (text []byte, err error) { } func (d *DatafeedState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "started": *d = Started diff --git a/typedapi/types/enums/dataframestate/dataframestate.go b/typedapi/types/enums/dataframestate/dataframestate.go old mode 100755 new mode 100644 index 4f80e16f5d..a88e526786 --- a/typedapi/types/enums/dataframestate/dataframestate.go +++ b/typedapi/types/enums/dataframestate/dataframestate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package dataframestate package dataframestate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Dataframe.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Dataframe.ts#L20-L26 type DataframeState struct { Name string } @@ -45,7 +45,7 @@ func (d DataframeState) MarshalText() (text []byte, err error) { } func (d *DataframeState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "started": *d = Started diff --git a/typedapi/types/enums/day/day.go b/typedapi/types/enums/day/day.go old mode 100755 new mode 100644 index 5478b4ce5e..ff2c814fd7 --- a/typedapi/types/enums/day/day.go +++ b/typedapi/types/enums/day/day.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package day package day import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L37-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L37-L45 type Day struct { Name string } @@ -49,7 +49,7 @@ func (d Day) MarshalText() (text []byte, err error) { } func (d *Day) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "sunday": *d = Sunday diff --git a/typedapi/types/enums/decision/decision.go b/typedapi/types/enums/decision/decision.go old mode 100755 new mode 100644 index 810eb05f7a..137f2a16b1 --- a/typedapi/types/enums/decision/decision.go +++ b/typedapi/types/enums/decision/decision.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package decision package decision import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L86-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L86-L95 type Decision struct { Name string } @@ -51,7 +51,7 @@ func (d Decision) MarshalText() (text []byte, err error) { } func (d *Decision) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "yes": *d = Yes diff --git a/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go b/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go old mode 100755 new mode 100644 index 9007e17674..0492ae7783 --- a/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go +++ b/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package delimitedpayloadencoding package delimitedpayloadencoding import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L61-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L62-L66 type DelimitedPayloadEncoding struct { Name string } @@ -41,7 +41,7 @@ func (d DelimitedPayloadEncoding) MarshalText() (text []byte, err error) { } func (d *DelimitedPayloadEncoding) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "int": *d = Int diff --git a/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go b/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go old mode 100755 new mode 100644 index d16fa39b05..c357af5bbb --- a/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go +++ b/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package deploymentallocationstate package deploymentallocationstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L278-L291 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L280-L293 type DeploymentAllocationState struct { Name string } @@ -41,7 +41,7 @@ func (d DeploymentAllocationState) MarshalText() (text []byte, err error) { } func (d *DeploymentAllocationState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "started": *d = Started diff --git a/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go b/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go old mode 100755 new mode 100644 index 10bb848f2a..6fd59415dd --- a/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go +++ b/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package deploymentassignmentstate package deploymentassignmentstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L293-L298 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L295-L300 type DeploymentAssignmentState struct { Name string } @@ -43,7 +43,7 @@ func (d DeploymentAssignmentState) MarshalText() (text []byte, err error) { } func (d *DeploymentAssignmentState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "starting": *d = Starting diff --git a/typedapi/types/enums/deploymentstate/deploymentstate.go b/typedapi/types/enums/deploymentstate/deploymentstate.go old mode 100755 new mode 100644 index 046439eadc..70bc1fcf63 --- a/typedapi/types/enums/deploymentstate/deploymentstate.go +++ b/typedapi/types/enums/deploymentstate/deploymentstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package deploymentstate package deploymentstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L263-L276 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L265-L278 type DeploymentState struct { Name string } @@ -41,7 +41,7 @@ func (d DeploymentState) MarshalText() (text []byte, err error) { } func (d *DeploymentState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "started": *d = Started diff --git a/typedapi/types/enums/deprecationlevel/deprecationlevel.go b/typedapi/types/enums/deprecationlevel/deprecationlevel.go old mode 100755 new mode 100644 index 9d69e3d6ae..362fb3cfe6 --- a/typedapi/types/enums/deprecationlevel/deprecationlevel.go +++ b/typedapi/types/enums/deprecationlevel/deprecationlevel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package deprecationlevel package deprecationlevel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/deprecations/types.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/deprecations/types.ts#L20-L27 type DeprecationLevel struct { Name string } @@ -43,7 +43,7 @@ func (d DeprecationLevel) MarshalText() (text []byte, err error) { } func (d *DeprecationLevel) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "none": *d = None diff --git a/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go b/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go old mode 100755 new mode 100644 index 7775a2a068..e6af939b09 --- a/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go +++ b/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package dfiindependencemeasure package dfiindependencemeasure import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Similarity.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Similarity.ts#L20-L24 type DFIIndependenceMeasure struct { Name string } @@ -41,7 +41,7 @@ func (d DFIIndependenceMeasure) MarshalText() (text []byte, err error) { } func (d *DFIIndependenceMeasure) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "standardized": *d = Standardized diff --git a/typedapi/types/enums/dfraftereffect/dfraftereffect.go b/typedapi/types/enums/dfraftereffect/dfraftereffect.go old mode 100755 new mode 100644 index 854196417b..64c3d480c8 --- a/typedapi/types/enums/dfraftereffect/dfraftereffect.go +++ b/typedapi/types/enums/dfraftereffect/dfraftereffect.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package dfraftereffect package dfraftereffect import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Similarity.ts#L26-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Similarity.ts#L26-L30 type DFRAfterEffect struct { Name string } @@ -41,7 +41,7 @@ func (d DFRAfterEffect) MarshalText() (text []byte, err error) { } func (d *DFRAfterEffect) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "no": *d = No diff --git a/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go b/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go old mode 100755 new mode 100644 index 71f1fa2c62..67214d94e1 --- a/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go +++ b/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package dfrbasicmodel package dfrbasicmodel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Similarity.ts#L32-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Similarity.ts#L32-L40 type DFRBasicModel struct { Name string } @@ -49,7 +49,7 @@ func (d DFRBasicModel) MarshalText() (text []byte, err error) { } func (d *DFRBasicModel) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "be": *d = Be diff --git a/typedapi/types/enums/distanceunit/distanceunit.go b/typedapi/types/enums/distanceunit/distanceunit.go old mode 100755 new mode 100644 index ff5fdd04a2..29d9e7098a --- a/typedapi/types/enums/distanceunit/distanceunit.go +++ b/typedapi/types/enums/distanceunit/distanceunit.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package distanceunit package distanceunit import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L30-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L30-L49 type DistanceUnit struct { Name string } @@ -53,7 +53,7 @@ func (d DistanceUnit) MarshalText() (text []byte, err error) { } func (d *DistanceUnit) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "in": *d = In diff --git a/typedapi/types/enums/dynamicmapping/dynamicmapping.go b/typedapi/types/enums/dynamicmapping/dynamicmapping.go old mode 100755 new mode 100644 index 4add4b0ce4..20f33a3484 --- a/typedapi/types/enums/dynamicmapping/dynamicmapping.go +++ b/typedapi/types/enums/dynamicmapping/dynamicmapping.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package dynamicmapping package dynamicmapping import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/dynamic-template.ts#L37-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/dynamic-template.ts#L37-L46 type DynamicMapping struct { Name string } @@ -38,12 +38,16 @@ var ( False = DynamicMapping{"false"} ) +func (d *DynamicMapping) UnmarshalJSON(data []byte) error { + return d.UnmarshalText(data) +} + func (d DynamicMapping) MarshalText() (text []byte, err error) { return []byte(d.String()), nil } func (d *DynamicMapping) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "strict": *d = Strict diff --git a/typedapi/types/enums/edgengramside/edgengramside.go b/typedapi/types/enums/edgengramside/edgengramside.go old mode 100755 new mode 100644 index cc1d960878..c4162d64fc --- a/typedapi/types/enums/edgengramside/edgengramside.go +++ b/typedapi/types/enums/edgengramside/edgengramside.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package edgengramside package edgengramside import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L73-L76 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L74-L77 type EdgeNGramSide struct { Name string } @@ -39,7 +39,7 @@ func (e EdgeNGramSide) MarshalText() (text []byte, err error) { } func (e *EdgeNGramSide) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "front": *e = Front diff --git a/typedapi/types/enums/emailpriority/emailpriority.go b/typedapi/types/enums/emailpriority/emailpriority.go old mode 100755 new mode 100644 index 5102c0c804..85fd79da35 --- a/typedapi/types/enums/emailpriority/emailpriority.go +++ b/typedapi/types/enums/emailpriority/emailpriority.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
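The dynamicmapping hunk above is the one structurally different change in this batch: it adds an UnmarshalJSON method that simply delegates to UnmarshalText. A hedged sketch of what that delegation buys, again with a hypothetical mirror type rather than the generated DynamicMapping: encoding/json hands UnmarshalJSON the raw token bytes, so unquoted JSON booleans (`true`, `false`) and quoted strings (`"strict"`) can presumably all be routed through the same quote-stripping switch.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// dynamic is an illustrative stand-in for the generated DynamicMapping enum.
type dynamic struct{ Name string }

// UnmarshalJSON receives the raw JSON token (quotes included for strings,
// bare for booleans) and forwards it to the text-based switch below.
func (d *dynamic) UnmarshalJSON(data []byte) error { return d.UnmarshalText(data) }

func (d *dynamic) UnmarshalText(text []byte) error {
	switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") {
	case "strict":
		*d = dynamic{"strict"}
	case "true":
		*d = dynamic{"true"}
	case "false":
		*d = dynamic{"false"}
	default:
		*d = dynamic{Name: string(text)}
	}
	return nil
}

func main() {
	var doc struct {
		Dynamic dynamic `json:"dynamic"`
	}
	// An unquoted boolean and a quoted string both decode into the same enum-style value.
	_ = json.Unmarshal([]byte(`{"dynamic": true}`), &doc)
	fmt.Println(doc.Dynamic.Name) // true
	_ = json.Unmarshal([]byte(`{"dynamic": "strict"}`), &doc)
	fmt.Println(doc.Dynamic.Name) // strict
}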
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package emailpriority package emailpriority import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L197-L203 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L197-L203 type EmailPriority struct { Name string } @@ -45,7 +45,7 @@ func (e EmailPriority) MarshalText() (text []byte, err error) { } func (e *EmailPriority) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "lowest": *e = Lowest diff --git a/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go b/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go old mode 100755 new mode 100644 index ed2c274baf..789286183f --- a/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go +++ b/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package enrichpolicyphase package enrichpolicyphase import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/execute_policy/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/execute_policy/types.ts#L24-L29 type EnrichPolicyPhase struct { Name string } @@ -43,7 +43,7 @@ func (e EnrichPolicyPhase) MarshalText() (text []byte, err error) { } func (e *EnrichPolicyPhase) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "SCHEDULED": *e = SCHEDULED diff --git a/typedapi/types/enums/excludefrequent/excludefrequent.go b/typedapi/types/enums/excludefrequent/excludefrequent.go old mode 100755 new mode 100644 index 46738eb646..8850813769 --- a/typedapi/types/enums/excludefrequent/excludefrequent.go +++ b/typedapi/types/enums/excludefrequent/excludefrequent.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package excludefrequent package excludefrequent import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Detector.ts#L82-L87 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Detector.ts#L82-L87 type ExcludeFrequent struct { Name string } @@ -43,7 +43,7 @@ func (e ExcludeFrequent) MarshalText() (text []byte, err error) { } func (e *ExcludeFrequent) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *e = All diff --git a/typedapi/types/enums/executionphase/executionphase.go b/typedapi/types/enums/executionphase/executionphase.go old mode 100755 new mode 100644 index c61ff606cb..55993dcae5 --- a/typedapi/types/enums/executionphase/executionphase.go +++ b/typedapi/types/enums/executionphase/executionphase.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package executionphase package executionphase import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L49-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L49-L58 type ExecutionPhase struct { Name string } @@ -51,7 +51,7 @@ func (e ExecutionPhase) MarshalText() (text []byte, err error) { } func (e *ExecutionPhase) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "awaits_execution": *e = Awaitsexecution diff --git a/typedapi/types/enums/executionstatus/executionstatus.go b/typedapi/types/enums/executionstatus/executionstatus.go old mode 100755 new mode 100644 index a2f20977ae..3c8b7a7ab3 --- a/typedapi/types/enums/executionstatus/executionstatus.go +++ b/typedapi/types/enums/executionstatus/executionstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package executionstatus package executionstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L38-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L38-L47 type ExecutionStatus struct { Name string } @@ -51,7 +51,7 @@ func (e ExecutionStatus) MarshalText() (text []byte, err error) { } func (e *ExecutionStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "awaits_execution": *e = Awaitsexecution diff --git a/typedapi/types/enums/expandwildcard/expandwildcard.go b/typedapi/types/enums/expandwildcard/expandwildcard.go old mode 100755 new mode 100644 index 99b8c65fc8..4126abcd7e --- a/typedapi/types/enums/expandwildcard/expandwildcard.go +++ b/typedapi/types/enums/expandwildcard/expandwildcard.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package expandwildcard package expandwildcard import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L181-L195 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L181-L195 type ExpandWildcard struct { Name string } @@ -45,7 +45,7 @@ func (e ExpandWildcard) MarshalText() (text []byte, err error) { } func (e *ExpandWildcard) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *e = All diff --git a/typedapi/types/enums/feature/feature.go b/typedapi/types/enums/feature/feature.go old mode 100755 new mode 100644 index 3eeb0e2d79..e0417ecdb3 --- a/typedapi/types/enums/feature/feature.go +++ b/typedapi/types/enums/feature/feature.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package feature package feature import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get/IndicesGetRequest.ts#L89-L93 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get/IndicesGetRequest.ts#L89-L93 type Feature struct { Name string } @@ -41,7 +41,7 @@ func (f Feature) MarshalText() (text []byte, err error) { } func (f *Feature) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "aliases": *f = Aliases diff --git a/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go b/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go old mode 100755 new mode 100644 index f81f086ed5..d21d67a46d --- a/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go +++ b/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package fieldsortnumerictype package fieldsortnumerictype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L37-L42 type FieldSortNumericType struct { Name string } @@ -43,7 +43,7 @@ func (f FieldSortNumericType) MarshalText() (text []byte, err error) { } func (f *FieldSortNumericType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "long": *f = Long diff --git a/typedapi/types/enums/fieldtype/fieldtype.go b/typedapi/types/enums/fieldtype/fieldtype.go old mode 100755 new mode 100644 index 61a999c32e..5f9288d805 --- a/typedapi/types/enums/fieldtype/fieldtype.go +++ b/typedapi/types/enums/fieldtype/fieldtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package fieldtype package fieldtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/Property.ts#L158-L201 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/Property.ts#L158-L201 type FieldType struct { Name string } @@ -119,7 +119,7 @@ func (f FieldType) MarshalText() (text []byte, err error) { } func (f *FieldType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "none": *f = None diff --git a/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go b/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go old mode 100755 new mode 100644 index 2157098d26..cdefe4379b --- a/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go +++ b/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package fieldvaluefactormodifier package fieldvaluefactormodifier import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L147-L158 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L147-L158 type FieldValueFactorModifier struct { Name string } @@ -55,7 +55,7 @@ func (f FieldValueFactorModifier) MarshalText() (text []byte, err error) { } func (f *FieldValueFactorModifier) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "none": *f = None diff --git a/typedapi/types/enums/filtertype/filtertype.go b/typedapi/types/enums/filtertype/filtertype.go old mode 100755 new mode 100644 index 6e66355a5d..db389a546b --- a/typedapi/types/enums/filtertype/filtertype.go +++ b/typedapi/types/enums/filtertype/filtertype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package filtertype package filtertype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Filter.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Filter.ts#L43-L46 type FilterType struct { Name string } @@ -39,7 +39,7 @@ func (f FilterType) MarshalText() (text []byte, err error) { } func (f *FilterType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "include": *f = Include diff --git a/typedapi/types/enums/followerindexstatus/followerindexstatus.go b/typedapi/types/enums/followerindexstatus/followerindexstatus.go old mode 100755 new mode 100644 index 684647f652..305416103a --- a/typedapi/types/enums/followerindexstatus/followerindexstatus.go +++ b/typedapi/types/enums/followerindexstatus/followerindexstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package followerindexstatus package followerindexstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow_info/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow_info/types.ts#L30-L33 type FollowerIndexStatus struct { Name string } @@ -39,7 +39,7 @@ func (f FollowerIndexStatus) MarshalText() (text []byte, err error) { } func (f *FollowerIndexStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "active": *f = Active diff --git a/typedapi/types/enums/functionboostmode/functionboostmode.go b/typedapi/types/enums/functionboostmode/functionboostmode.go old mode 100755 new mode 100644 index eccae5e8b3..3ea5a344ae --- a/typedapi/types/enums/functionboostmode/functionboostmode.go +++ b/typedapi/types/enums/functionboostmode/functionboostmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package functionboostmode package functionboostmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L138-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L138-L145 type FunctionBoostMode struct { Name string } @@ -47,7 +47,7 @@ func (f FunctionBoostMode) MarshalText() (text []byte, err error) { } func (f *FunctionBoostMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "multiply": *f = Multiply diff --git a/typedapi/types/enums/functionscoremode/functionscoremode.go b/typedapi/types/enums/functionscoremode/functionscoremode.go old mode 100755 new mode 100644 index 345a950a96..f3a3c9ae2b --- a/typedapi/types/enums/functionscoremode/functionscoremode.go +++ b/typedapi/types/enums/functionscoremode/functionscoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package functionscoremode package functionscoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L129-L136 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L129-L136 type FunctionScoreMode struct { Name string } @@ -47,7 +47,7 @@ func (f FunctionScoreMode) MarshalText() (text []byte, err error) { } func (f *FunctionScoreMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "multiply": *f = Multiply diff --git a/typedapi/types/enums/gappolicy/gappolicy.go b/typedapi/types/enums/gappolicy/gappolicy.go old mode 100755 new mode 100644 index 304c92159d..091f5d9a0a --- a/typedapi/types/enums/gappolicy/gappolicy.go +++ b/typedapi/types/enums/gappolicy/gappolicy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package gappolicy package gappolicy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L52-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L52-L67 type GapPolicy struct { Name string } @@ -41,7 +41,7 @@ func (g GapPolicy) MarshalText() (text []byte, err error) { } func (g *GapPolicy) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "skip": *g = Skip diff --git a/typedapi/types/enums/geodistancetype/geodistancetype.go b/typedapi/types/enums/geodistancetype/geodistancetype.go old mode 100755 new mode 100644 index 6cce96800f..350d1821b3 --- a/typedapi/types/enums/geodistancetype/geodistancetype.go +++ b/typedapi/types/enums/geodistancetype/geodistancetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package geodistancetype package geodistancetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L51-L54 type GeoDistanceType struct { Name string } @@ -39,7 +39,7 @@ func (g GeoDistanceType) MarshalText() (text []byte, err error) { } func (g *GeoDistanceType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "arc": *g = Arc diff --git a/typedapi/types/enums/geoexecution/geoexecution.go b/typedapi/types/enums/geoexecution/geoexecution.go old mode 100755 new mode 100644 index ae7e69d585..536d824fef --- a/typedapi/types/enums/geoexecution/geoexecution.go +++ b/typedapi/types/enums/geoexecution/geoexecution.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package geoexecution package geoexecution import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L43-L46 type GeoExecution struct { Name string } @@ -39,7 +39,7 @@ func (g GeoExecution) MarshalText() (text []byte, err error) { } func (g *GeoExecution) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "memory": *g = Memory diff --git a/typedapi/types/enums/geoorientation/geoorientation.go b/typedapi/types/enums/geoorientation/geoorientation.go old mode 100755 new mode 100644 index 24404be14a..f842b2e2aa --- a/typedapi/types/enums/geoorientation/geoorientation.go +++ b/typedapi/types/enums/geoorientation/geoorientation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package geoorientation package geoorientation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/geo.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/geo.ts#L30-L35 type GeoOrientation struct { Name string } @@ -39,7 +39,7 @@ func (g GeoOrientation) MarshalText() (text []byte, err error) { } func (g *GeoOrientation) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "right": *g = Right diff --git a/typedapi/types/enums/geoshaperelation/geoshaperelation.go b/typedapi/types/enums/geoshaperelation/geoshaperelation.go old mode 100755 new mode 100644 index 11805d7434..ff481e76f7 --- a/typedapi/types/enums/geoshaperelation/geoshaperelation.go +++ b/typedapi/types/enums/geoshaperelation/geoshaperelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package geoshaperelation package geoshaperelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L67-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L67-L72 type GeoShapeRelation struct { Name string } @@ -43,7 +43,7 @@ func (g GeoShapeRelation) MarshalText() (text []byte, err error) { } func (g *GeoShapeRelation) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "intersects": *g = Intersects diff --git a/typedapi/types/enums/geostrategy/geostrategy.go b/typedapi/types/enums/geostrategy/geostrategy.go old mode 100755 new mode 100644 index e18e587cb6..2c40591b3d --- a/typedapi/types/enums/geostrategy/geostrategy.go +++ b/typedapi/types/enums/geostrategy/geostrategy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package geostrategy package geostrategy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/geo.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/geo.ts#L52-L55 type GeoStrategy struct { Name string } @@ -39,7 +39,7 @@ func (g GeoStrategy) MarshalText() (text []byte, err error) { } func (g *GeoStrategy) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "recursive": *g = Recursive diff --git a/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go b/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go old mode 100755 new mode 100644 index faab4daafb..011343dd40 --- a/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go +++ b/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package geovalidationmethod package geovalidationmethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L107-L111 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L107-L111 type GeoValidationMethod struct { Name string } @@ -41,7 +41,7 @@ func (g GeoValidationMethod) MarshalText() (text []byte, err error) { } func (g *GeoValidationMethod) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "coerce": *g = Coerce diff --git a/typedapi/types/enums/granttype/granttype.go b/typedapi/types/enums/granttype/granttype.go old mode 100755 new mode 100644 index aef2986cf4..9742fd356b --- a/typedapi/types/enums/granttype/granttype.go +++ b/typedapi/types/enums/granttype/granttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package granttype package granttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/GrantType.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/GrantType.ts#L20-L23 type GrantType struct { Name string } @@ -39,7 +39,7 @@ func (g GrantType) MarshalText() (text []byte, err error) { } func (g *GrantType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "password": *g = Password diff --git a/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go b/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go old mode 100755 new mode 100644 index 46893df571..d5d5fe0c85 --- a/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go +++ b/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package gridaggregationtype package gridaggregationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_mvt/_types/GridType.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_mvt/_types/GridType.ts#L27-L30 type GridAggregationType struct { Name string } @@ -39,7 +39,7 @@ func (g GridAggregationType) MarshalText() (text []byte, err error) { } func (g *GridAggregationType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "geotile": *g = Geotile diff --git a/typedapi/types/enums/gridtype/gridtype.go b/typedapi/types/enums/gridtype/gridtype.go old mode 100755 new mode 100644 index 778296a040..1e620f7dfa --- a/typedapi/types/enums/gridtype/gridtype.go +++ b/typedapi/types/enums/gridtype/gridtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package gridtype package gridtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_mvt/_types/GridType.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_mvt/_types/GridType.ts#L20-L25 type GridType struct { Name string } @@ -41,7 +41,7 @@ func (g GridType) MarshalText() (text []byte, err error) { } func (g *GridType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "grid": *g = Grid diff --git a/typedapi/types/enums/groupby/groupby.go b/typedapi/types/enums/groupby/groupby.go old mode 100755 new mode 100644 index 5715bb2aa1..f942a73365 --- a/typedapi/types/enums/groupby/groupby.go +++ b/typedapi/types/enums/groupby/groupby.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package groupby package groupby import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/_types/GroupBy.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/_types/GroupBy.ts#L20-L27 type GroupBy struct { Name string } @@ -41,7 +41,7 @@ func (g GroupBy) MarshalText() (text []byte, err error) { } func (g *GroupBy) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "nodes": *g = Nodes diff --git a/typedapi/types/enums/healthstatus/healthstatus.go b/typedapi/types/enums/healthstatus/healthstatus.go old mode 100755 new mode 100644 index c6742b5cb6..e4c6ca2a2a --- a/typedapi/types/enums/healthstatus/healthstatus.go +++ b/typedapi/types/enums/healthstatus/healthstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package healthstatus package healthstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L199-L219 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L199-L219 type HealthStatus struct { Name string } @@ -41,7 +41,7 @@ func (h HealthStatus) MarshalText() (text []byte, err error) { } func (h *HealthStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "green": *h = Green diff --git a/typedapi/types/enums/highlighterencoder/highlighterencoder.go b/typedapi/types/enums/highlighterencoder/highlighterencoder.go old mode 100755 new mode 100644 index 6f609528f5..8420f0f188 --- a/typedapi/types/enums/highlighterencoder/highlighterencoder.go +++ b/typedapi/types/enums/highlighterencoder/highlighterencoder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package highlighterencoder package highlighterencoder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L62-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L62-L65 type HighlighterEncoder struct { Name string } @@ -39,7 +39,7 @@ func (h HighlighterEncoder) MarshalText() (text []byte, err error) { } func (h *HighlighterEncoder) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "default": *h = Default diff --git a/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go b/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go old mode 100755 new mode 100644 index 7e37983859..491dbc36ec --- a/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go +++ b/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package highlighterfragmenter package highlighterfragmenter import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L67-L70 type HighlighterFragmenter struct { Name string } @@ -39,7 +39,7 @@ func (h HighlighterFragmenter) MarshalText() (text []byte, err error) { } func (h *HighlighterFragmenter) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "simple": *h = Simple diff --git a/typedapi/types/enums/highlighterorder/highlighterorder.go b/typedapi/types/enums/highlighterorder/highlighterorder.go old mode 100755 new mode 100644 index 8b9c8348b3..9698986ae2 --- a/typedapi/types/enums/highlighterorder/highlighterorder.go +++ b/typedapi/types/enums/highlighterorder/highlighterorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package highlighterorder package highlighterorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L72-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L72-L74 type HighlighterOrder struct { Name string } @@ -37,7 +37,7 @@ func (h HighlighterOrder) MarshalText() (text []byte, err error) { } func (h *HighlighterOrder) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "score": *h = Score diff --git a/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go b/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go old mode 100755 new mode 100644 index 148fb94714..0d8250f3c5 --- a/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go +++ b/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package highlightertagsschema package highlightertagsschema import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L76-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L76-L78 type HighlighterTagsSchema struct { Name string } @@ -37,7 +37,7 @@ func (h HighlighterTagsSchema) MarshalText() (text []byte, err error) { } func (h *HighlighterTagsSchema) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "styled": *h = Styled diff --git a/typedapi/types/enums/highlightertype/highlightertype.go b/typedapi/types/enums/highlightertype/highlightertype.go old mode 100755 new mode 100644 index 8bcf3d83e6..9112bdcbf1 --- a/typedapi/types/enums/highlightertype/highlightertype.go +++ b/typedapi/types/enums/highlightertype/highlightertype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package highlightertype package highlightertype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L80-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L80-L86 type HighlighterType struct { Name string } @@ -41,7 +41,7 @@ func (h HighlighterType) MarshalText() (text []byte, err error) { } func (h *HighlighterType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "plain": *h = Plain diff --git a/typedapi/types/enums/holtwinterstype/holtwinterstype.go b/typedapi/types/enums/holtwinterstype/holtwinterstype.go old mode 100755 new mode 100644 index d9a4c7a6dd..5ebb212626 --- a/typedapi/types/enums/holtwinterstype/holtwinterstype.go +++ b/typedapi/types/enums/holtwinterstype/holtwinterstype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package holtwinterstype package holtwinterstype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L243-L248 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L243-L248 type HoltWintersType struct { Name string } @@ -39,7 +39,7 @@ func (h HoltWintersType) MarshalText() (text []byte, err error) { } func (h *HoltWintersType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "add": *h = Add diff --git a/typedapi/types/enums/httpinputmethod/httpinputmethod.go b/typedapi/types/enums/httpinputmethod/httpinputmethod.go old mode 100755 new mode 100644 index 71d2082e99..bcd75da87c --- a/typedapi/types/enums/httpinputmethod/httpinputmethod.go +++ b/typedapi/types/enums/httpinputmethod/httpinputmethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package httpinputmethod package httpinputmethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L59-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L59-L65 type HttpInputMethod struct { Name string } @@ -45,7 +45,7 @@ func (h HttpInputMethod) MarshalText() (text []byte, err error) { } func (h *HttpInputMethod) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "head": *h = Head diff --git a/typedapi/types/enums/ibdistribution/ibdistribution.go b/typedapi/types/enums/ibdistribution/ibdistribution.go old mode 100755 new mode 100644 index ff094fa1f3..5dbc4d20ea --- a/typedapi/types/enums/ibdistribution/ibdistribution.go +++ b/typedapi/types/enums/ibdistribution/ibdistribution.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package ibdistribution package ibdistribution import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Similarity.ts#L42-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Similarity.ts#L42-L45 type IBDistribution struct { Name string } @@ -39,7 +39,7 @@ func (i IBDistribution) MarshalText() (text []byte, err error) { } func (i *IBDistribution) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "ll": *i = Ll diff --git a/typedapi/types/enums/iblambda/iblambda.go b/typedapi/types/enums/iblambda/iblambda.go old mode 100755 new mode 100644 index 7544980290..c69d74af4f --- a/typedapi/types/enums/iblambda/iblambda.go +++ b/typedapi/types/enums/iblambda/iblambda.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package iblambda package iblambda import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Similarity.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Similarity.ts#L47-L50 type IBLambda struct { Name string } @@ -39,7 +39,7 @@ func (i IBLambda) MarshalText() (text []byte, err error) { } func (i *IBLambda) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "df": *i = Df diff --git a/typedapi/types/enums/icucollationalternate/icucollationalternate.go b/typedapi/types/enums/icucollationalternate/icucollationalternate.go old mode 100755 new mode 100644 index 75d2bb16a9..659e61e728 --- a/typedapi/types/enums/icucollationalternate/icucollationalternate.go +++ b/typedapi/types/enums/icucollationalternate/icucollationalternate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icucollationalternate package icucollationalternate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L89-L92 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L89-L92 type IcuCollationAlternate struct { Name string } @@ -39,7 +39,7 @@ func (i IcuCollationAlternate) MarshalText() (text []byte, err error) { } func (i *IcuCollationAlternate) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "shifted": *i = Shifted diff --git a/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go b/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go old mode 100755 new mode 100644 index 5abf9bb927..e38a259024 --- a/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go +++ b/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icucollationcasefirst package icucollationcasefirst import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L94-L97 type IcuCollationCaseFirst struct { Name string } @@ -39,7 +39,7 @@ func (i IcuCollationCaseFirst) MarshalText() (text []byte, err error) { } func (i *IcuCollationCaseFirst) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "lower": *i = Lower diff --git a/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go b/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go old mode 100755 new mode 100644 index b76115089c..7093fe637e --- a/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go +++ b/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icucollationdecomposition package icucollationdecomposition import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L99-L102 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L99-L102 type IcuCollationDecomposition struct { Name string } @@ -39,7 +39,7 @@ func (i IcuCollationDecomposition) MarshalText() (text []byte, err error) { } func (i *IcuCollationDecomposition) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "no": *i = No diff --git a/typedapi/types/enums/icucollationstrength/icucollationstrength.go b/typedapi/types/enums/icucollationstrength/icucollationstrength.go old mode 100755 new mode 100644 index a321a7ee81..e062927d6f --- a/typedapi/types/enums/icucollationstrength/icucollationstrength.go +++ b/typedapi/types/enums/icucollationstrength/icucollationstrength.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icucollationstrength package icucollationstrength import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L104-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L104-L110 type IcuCollationStrength struct { Name string } @@ -45,7 +45,7 @@ func (i IcuCollationStrength) MarshalText() (text []byte, err error) { } func (i *IcuCollationStrength) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "primary": *i = Primary diff --git a/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go b/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go old mode 100755 new mode 100644 index 4495a951c6..de1910560a --- a/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go +++ b/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icunormalizationmode package icunormalizationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L78-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L78-L81 type IcuNormalizationMode struct { Name string } @@ -39,7 +39,7 @@ func (i IcuNormalizationMode) MarshalText() (text []byte, err error) { } func (i *IcuNormalizationMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "decompose": *i = Decompose diff --git a/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go b/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go old mode 100755 new mode 100644 index 97b94e5638..3feaea71d4 --- a/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go +++ b/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icunormalizationtype package icunormalizationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L83-L87 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L83-L87 type IcuNormalizationType struct { Name string } @@ -41,7 +41,7 @@ func (i IcuNormalizationType) MarshalText() (text []byte, err error) { } func (i *IcuNormalizationType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "nfc": *i = Nfc diff --git a/typedapi/types/enums/icutransformdirection/icutransformdirection.go b/typedapi/types/enums/icutransformdirection/icutransformdirection.go old mode 100755 new mode 100644 index 4b9e3f5b21..dae19cf9ad --- a/typedapi/types/enums/icutransformdirection/icutransformdirection.go +++ b/typedapi/types/enums/icutransformdirection/icutransformdirection.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package icutransformdirection package icutransformdirection import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L73-L76 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L73-L76 type IcuTransformDirection struct { Name string } @@ -39,7 +39,7 @@ func (i IcuTransformDirection) MarshalText() (text []byte, err error) { } func (i *IcuTransformDirection) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "forward": *i = Forward diff --git a/typedapi/types/enums/impactarea/impactarea.go b/typedapi/types/enums/impactarea/impactarea.go new file mode 100644 index 0000000000..34383cd169 --- /dev/null +++ b/typedapi/types/enums/impactarea/impactarea.go @@ -0,0 +1,65 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Package impactarea +package impactarea + +import "strings" + +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L71-L76 +type ImpactArea struct { + Name string +} + +var ( + Search = ImpactArea{"search"} + + Ingest = ImpactArea{"ingest"} + + Backup = ImpactArea{"backup"} + + Deploymentmanagement = ImpactArea{"deployment_management"} +) + +func (i ImpactArea) MarshalText() (text []byte, err error) { + return []byte(i.String()), nil +} + +func (i *ImpactArea) UnmarshalText(text []byte) error { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { + + case "search": + *i = Search + case "ingest": + *i = Ingest + case "backup": + *i = Backup + case "deployment_management": + *i = Deploymentmanagement + default: + *i = ImpactArea{string(text)} + } + + return nil +} + +func (i ImpactArea) String() string { + return i.Name +} diff --git a/typedapi/types/enums/include/include.go b/typedapi/types/enums/include/include.go old mode 100755 new mode 100644 index 525616d7ef..5671238b85 --- a/typedapi/types/enums/include/include.go +++ b/typedapi/types/enums/include/include.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package include package include import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Include.ts#L20-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Include.ts#L20-L47 type Include struct { Name string } @@ -36,6 +36,8 @@ var ( Hyperparameters = Include{"hyperparameters"} Totalfeatureimportance = Include{"total_feature_importance"} + + Definitionstatus = Include{"definition_status"} ) func (i Include) MarshalText() (text []byte, err error) { @@ -43,7 +45,7 @@ func (i Include) MarshalText() (text []byte, err error) { } func (i *Include) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "definition": *i = Definition @@ -53,6 +55,8 @@ func (i *Include) UnmarshalText(text []byte) error { *i = Hyperparameters case "total_feature_importance": *i = Totalfeatureimportance + case "definition_status": + *i = Definitionstatus default: *i = Include{string(text)} } diff --git a/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go b/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go old mode 100755 new mode 100644 index 7c2944130f..b5c19466c8 --- a/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go +++ b/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
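The new impactarea package above follows the same enum shape as the existing ones. A hedged usage sketch; the slice-of-areas payload is an assumption for illustration, and only the constants declared in the new file are taken from the diff.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/impactarea"
)

func main() {
	// encoding/json unquotes each string before calling UnmarshalText, while the
	// quote-stripping switch keeps direct UnmarshalText calls equally safe.
	var areas []impactarea.ImpactArea
	if err := json.Unmarshal([]byte(`["ingest","deployment_management"]`), &areas); err != nil {
		panic(err)
	}
	fmt.Println(areas[1] == impactarea.Deploymentmanagement) // true
}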
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexcheckonstartup package indexcheckonstartup import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L253-L260 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L253-L260 type IndexCheckOnStartup struct { Name string } @@ -36,12 +36,16 @@ var ( Checksum = IndexCheckOnStartup{"checksum"} ) +func (i *IndexCheckOnStartup) UnmarshalJSON(data []byte) error { + return i.UnmarshalText(data) +} + func (i IndexCheckOnStartup) MarshalText() (text []byte, err error) { return []byte(i.String()), nil } func (i *IndexCheckOnStartup) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "true": *i = True diff --git a/typedapi/types/enums/indexingjobstate/indexingjobstate.go b/typedapi/types/enums/indexingjobstate/indexingjobstate.go old mode 100755 new mode 100644 index 67efc1d8ce..aac44358a2 --- a/typedapi/types/enums/indexingjobstate/indexingjobstate.go +++ b/typedapi/types/enums/indexingjobstate/indexingjobstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexingjobstate package indexingjobstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_jobs/types.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_jobs/types.ts#L66-L72 type IndexingJobState struct { Name string } @@ -45,7 +45,7 @@ func (i IndexingJobState) MarshalText() (text []byte, err error) { } func (i *IndexingJobState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "started": *i = Started diff --git a/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go b/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go old mode 100755 new mode 100644 index 5bb315e8bc..a265f875bf --- a/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go +++ b/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
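The UnmarshalJSON method added to IndexCheckOnStartup above forwards the raw JSON bytes, quotes and all, to UnmarshalText, which is exactly the case the ReplaceAll change handles. A short sketch using only the Checksum constant visible in this diff:

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexcheckonstartup"
)

func main() {
	var c indexcheckonstartup.IndexCheckOnStartup
	// Same path the generated method takes: quoted bytes go straight to UnmarshalText.
	if err := c.UnmarshalJSON([]byte(`"checksum"`)); err != nil {
		panic(err)
	}
	fmt.Println(c == indexcheckonstartup.Checksum) // true
}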
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexmetadatastate package indexmetadatastate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L213-L219 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L213-L219 type IndexMetadataState struct { Name string } @@ -39,7 +39,7 @@ func (i IndexMetadataState) MarshalText() (text []byte, err error) { } func (i *IndexMetadataState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "open": *i = Open diff --git a/typedapi/types/enums/indexoptions/indexoptions.go b/typedapi/types/enums/indexoptions/indexoptions.go old mode 100755 new mode 100644 index 56a7f1cc9a..b88c585c1f --- a/typedapi/types/enums/indexoptions/indexoptions.go +++ b/typedapi/types/enums/indexoptions/indexoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexoptions package indexoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L235-L240 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L235-L240 type IndexOptions struct { Name string } @@ -43,7 +43,7 @@ func (i IndexOptions) MarshalText() (text []byte, err error) { } func (i *IndexOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "docs": *i = Docs diff --git a/typedapi/types/enums/indexprivilege/indexprivilege.go b/typedapi/types/enums/indexprivilege/indexprivilege.go old mode 100755 new mode 100644 index 3116c3283b..6a910b86ee --- a/typedapi/types/enums/indexprivilege/indexprivilege.go +++ b/typedapi/types/enums/indexprivilege/indexprivilege.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexprivilege package indexprivilege import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L165-L185 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L166-L187 type IndexPrivilege struct { Name string } @@ -73,7 +73,7 @@ func (i IndexPrivilege) MarshalText() (text []byte, err error) { } func (i *IndexPrivilege) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "none": *i = None diff --git a/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go b/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go old mode 100755 new mode 100644 index b37f389f5c..ee98edda60 --- a/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go +++ b/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexroutingallocationoptions package indexroutingallocationoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L38-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L38-L43 type IndexRoutingAllocationOptions struct { Name string } @@ -43,7 +43,7 @@ func (i IndexRoutingAllocationOptions) MarshalText() (text []byte, err error) { } func (i *IndexRoutingAllocationOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *i = All diff --git a/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go b/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go old mode 100755 new mode 100644 index c8646c9f68..06d8fc2b15 --- a/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go +++ b/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indexroutingrebalanceoptions package indexroutingrebalanceoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L45-L50 type IndexRoutingRebalanceOptions struct { Name string } @@ -43,7 +43,7 @@ func (i IndexRoutingRebalanceOptions) MarshalText() (text []byte, err error) { } func (i *IndexRoutingRebalanceOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *i = All diff --git a/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go b/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go new file mode 100644 index 0000000000..32b8f48241 --- /dev/null +++ b/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go @@ -0,0 +1,65 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +// Package indicatorhealthstatus +package indicatorhealthstatus + +import "strings" + +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L25-L30 +type IndicatorHealthStatus struct { + Name string +} + +var ( + Green = IndicatorHealthStatus{"green"} + + Yellow = IndicatorHealthStatus{"yellow"} + + Red = IndicatorHealthStatus{"red"} + + Unknown = IndicatorHealthStatus{"unknown"} +) + +func (i IndicatorHealthStatus) MarshalText() (text []byte, err error) { + return []byte(i.String()), nil +} + +func (i *IndicatorHealthStatus) UnmarshalText(text []byte) error { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { + + case "green": + *i = Green + case "yellow": + *i = Yellow + case "red": + *i = Red + case "unknown": + *i = Unknown + default: + *i = IndicatorHealthStatus{string(text)} + } + + return nil +} + +func (i IndicatorHealthStatus) String() string { + return i.Name +} diff --git a/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go b/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go old mode 100755 new mode 100644 index 51d6128ddd..cf52eac9d2 --- a/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go +++ b/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package indicesblockoptions package indicesblockoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/add_block/IndicesAddBlockRequest.ts#L43-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/add_block/IndicesAddBlockRequest.ts#L43-L48 type IndicesBlockOptions struct { Name string } @@ -43,7 +43,7 @@ func (i IndicesBlockOptions) MarshalText() (text []byte, err error) { } func (i *IndicesBlockOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "metadata": *i = Metadata diff --git a/typedapi/types/enums/inputtype/inputtype.go b/typedapi/types/enums/inputtype/inputtype.go old mode 100755 new mode 100644 index d3976259c1..3caf1663a8 --- a/typedapi/types/enums/inputtype/inputtype.go +++ b/typedapi/types/enums/inputtype/inputtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
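The new indicatorhealthstatus package above plugs into ordinary struct decoding, and its default branch keeps an unrecognised status verbatim instead of failing. A sketch under that assumption; the indicator struct below is a hypothetical stand-in for this example, not a type from this library.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus"
)

// indicator is a hypothetical wrapper used only for this sketch.
type indicator struct {
	Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"`
}

func main() {
	var known, unknown indicator
	_ = json.Unmarshal([]byte(`{"status":"yellow"}`), &known)
	_ = json.Unmarshal([]byte(`{"status":"chartreuse"}`), &unknown)
	fmt.Println(known.Status == indicatorhealthstatus.Yellow) // true
	fmt.Println(unknown.Status.Name)                          // "chartreuse", preserved by the default branch
}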
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package inputtype package inputtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L100-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L100-L104 type InputType struct { Name string } @@ -41,7 +41,7 @@ func (i InputType) MarshalText() (text []byte, err error) { } func (i *InputType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "http": *i = Http diff --git a/typedapi/types/enums/jobblockedreason/jobblockedreason.go b/typedapi/types/enums/jobblockedreason/jobblockedreason.go old mode 100755 new mode 100644 index 15b8151179..e72be7cbf2 --- a/typedapi/types/enums/jobblockedreason/jobblockedreason.go +++ b/typedapi/types/enums/jobblockedreason/jobblockedreason.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package jobblockedreason package jobblockedreason import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L174-L178 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L174-L178 type JobBlockedReason struct { Name string } @@ -41,7 +41,7 @@ func (j JobBlockedReason) MarshalText() (text []byte, err error) { } func (j *JobBlockedReason) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "delete": *j = Delete diff --git a/typedapi/types/enums/jobstate/jobstate.go b/typedapi/types/enums/jobstate/jobstate.go old mode 100755 new mode 100644 index dbf487a11e..7b52b604ae --- a/typedapi/types/enums/jobstate/jobstate.go +++ b/typedapi/types/enums/jobstate/jobstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package jobstate package jobstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L36-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L36-L42 type JobState struct { Name string } @@ -45,7 +45,7 @@ func (j JobState) MarshalText() (text []byte, err error) { } func (j *JobState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "closing": *j = Closing diff --git a/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go b/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go old mode 100755 new mode 100644 index 4e944b3753..b17ad91163 --- a/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go +++ b/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package jsonprocessorconflictstrategy package jsonprocessorconflictstrategy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L279-L284 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L279-L284 type JsonProcessorConflictStrategy struct { Name string } @@ -39,7 +39,7 @@ func (j JsonProcessorConflictStrategy) MarshalText() (text []byte, err error) { } func (j *JsonProcessorConflictStrategy) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "replace": *j = Replace diff --git a/typedapi/types/enums/keeptypesmode/keeptypesmode.go b/typedapi/types/enums/keeptypesmode/keeptypesmode.go old mode 100755 new mode 100644 index b685a5e135..18f45a6eb3 --- a/typedapi/types/enums/keeptypesmode/keeptypesmode.go +++ b/typedapi/types/enums/keeptypesmode/keeptypesmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package keeptypesmode package keeptypesmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L212-L215 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L213-L216 type KeepTypesMode struct { Name string } @@ -39,7 +39,7 @@ func (k KeepTypesMode) MarshalText() (text []byte, err error) { } func (k *KeepTypesMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "include": *k = Include diff --git a/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go b/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go old mode 100755 new mode 100644 index 90ef146390..53b87c80b6 --- a/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go +++ b/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package kuromojitokenizationmode package kuromojitokenizationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L52-L56 type KuromojiTokenizationMode struct { Name string } @@ -41,7 +41,7 @@ func (k KuromojiTokenizationMode) MarshalText() (text []byte, err error) { } func (k *KuromojiTokenizationMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "normal": *k = Normal diff --git a/typedapi/types/enums/language/language.go b/typedapi/types/enums/language/language.go old mode 100755 new mode 100644 index d4fe3ffd1b..fd22029f25 --- a/typedapi/types/enums/language/language.go +++ b/typedapi/types/enums/language/language.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package language package language import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/languages.ts#L20-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/languages.ts#L20-L55 type Language struct { Name string } @@ -103,7 +103,7 @@ func (l Language) MarshalText() (text []byte, err error) { } func (l *Language) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "Arabic": *l = Arabic diff --git a/typedapi/types/enums/level/level.go b/typedapi/types/enums/level/level.go old mode 100755 new mode 100644 index a3629dc7eb..0936bbfbe9 --- a/typedapi/types/enums/level/level.go +++ b/typedapi/types/enums/level/level.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package level package level import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L229-L233 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L229-L233 type Level struct { Name string } @@ -41,7 +41,7 @@ func (l Level) MarshalText() (text []byte, err error) { } func (l *Level) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "cluster": *l = Cluster diff --git a/typedapi/types/enums/licensestatus/licensestatus.go b/typedapi/types/enums/licensestatus/licensestatus.go old mode 100755 new mode 100644 index f6648d5bab..bec112c878 --- a/typedapi/types/enums/licensestatus/licensestatus.go +++ b/typedapi/types/enums/licensestatus/licensestatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package licensestatus package licensestatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/_types/License.ts#L35-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/_types/License.ts#L35-L40 type LicenseStatus struct { Name string } @@ -43,7 +43,7 @@ func (l LicenseStatus) MarshalText() (text []byte, err error) { } func (l *LicenseStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "active": *l = Active diff --git a/typedapi/types/enums/licensetype/licensetype.go b/typedapi/types/enums/licensetype/licensetype.go old mode 100755 new mode 100644 index 1da97535f7..4393834bb7 --- a/typedapi/types/enums/licensetype/licensetype.go +++ b/typedapi/types/enums/licensetype/licensetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package licensetype package licensetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/_types/License.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/_types/License.ts#L23-L33 type LicenseType struct { Name string } @@ -53,7 +53,7 @@ func (l LicenseType) MarshalText() (text []byte, err error) { } func (l *LicenseType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "missing": *l = Missing diff --git a/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go b/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go old mode 100755 new mode 100644 index 9dbb29d57b..f337ddf33e --- a/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go +++ b/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package lifecycleoperationmode package lifecycleoperationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Lifecycle.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Lifecycle.ts#L20-L24 type LifecycleOperationMode struct { Name string } @@ -41,7 +41,7 @@ func (l LifecycleOperationMode) MarshalText() (text []byte, err error) { } func (l *LifecycleOperationMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "RUNNING": *l = RUNNING diff --git a/typedapi/types/enums/matchtype/matchtype.go b/typedapi/types/enums/matchtype/matchtype.go old mode 100755 new mode 100644 index 44b7f946a3..6aa6a00311 --- a/typedapi/types/enums/matchtype/matchtype.go +++ b/typedapi/types/enums/matchtype/matchtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package matchtype package matchtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/dynamic-template.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/dynamic-template.ts#L32-L35 type MatchType struct { Name string } @@ -39,7 +39,7 @@ func (m MatchType) MarshalText() (text []byte, err error) { } func (m *MatchType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "simple": *m = Simple diff --git a/typedapi/types/enums/memorystatus/memorystatus.go b/typedapi/types/enums/memorystatus/memorystatus.go old mode 100755 new mode 100644 index 3fc469267b..08f202a82d --- a/typedapi/types/enums/memorystatus/memorystatus.go +++ b/typedapi/types/enums/memorystatus/memorystatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package memorystatus package memorystatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Model.ts#L85-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Model.ts#L85-L89 type MemoryStatus struct { Name string } @@ -41,7 +41,7 @@ func (m MemoryStatus) MarshalText() (text []byte, err error) { } func (m *MemoryStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "ok": *m = Ok diff --git a/typedapi/types/enums/metric/metric.go b/typedapi/types/enums/metric/metric.go old mode 100755 new mode 100644 index 75e7919987..c4747b9a55 --- a/typedapi/types/enums/metric/metric.go +++ b/typedapi/types/enums/metric/metric.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package metric package metric import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/_types/Metric.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/_types/Metric.ts#L22-L28 type Metric struct { Name string } @@ -45,7 +45,7 @@ func (m Metric) MarshalText() (text []byte, err error) { } func (m *Metric) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "min": *m = Min diff --git a/typedapi/types/enums/migrationstatus/migrationstatus.go b/typedapi/types/enums/migrationstatus/migrationstatus.go old mode 100755 new mode 100644 index 02fda2a74a..76d89e370f --- a/typedapi/types/enums/migrationstatus/migrationstatus.go +++ b/typedapi/types/enums/migrationstatus/migrationstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package migrationstatus package migrationstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L30-L35 type MigrationStatus struct { Name string } @@ -43,7 +43,7 @@ func (m MigrationStatus) MarshalText() (text []byte, err error) { } func (m *MigrationStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "NO_MIGRATION_NEEDED": *m = NOMIGRATIONNEEDED diff --git a/typedapi/types/enums/minimuminterval/minimuminterval.go b/typedapi/types/enums/minimuminterval/minimuminterval.go old mode 100755 new mode 100644 index 0e1bd12ad5..907f9cf442 --- a/typedapi/types/enums/minimuminterval/minimuminterval.go +++ b/typedapi/types/enums/minimuminterval/minimuminterval.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package minimuminterval package minimuminterval import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L64-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L64-L71 type MinimumInterval struct { Name string } @@ -47,7 +47,7 @@ func (m MinimumInterval) MarshalText() (text []byte, err error) { } func (m *MinimumInterval) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "second": *m = Second diff --git a/typedapi/types/enums/missingorder/missingorder.go b/typedapi/types/enums/missingorder/missingorder.go old mode 100755 new mode 100644 index e37f77a16d..c161045799 --- a/typedapi/types/enums/missingorder/missingorder.go +++ b/typedapi/types/enums/missingorder/missingorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package missingorder package missingorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/AggregationContainer.ts#L212-L216 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/AggregationContainer.ts#L214-L218 type MissingOrder struct { Name string } @@ -41,7 +41,7 @@ func (m MissingOrder) MarshalText() (text []byte, err error) { } func (m *MissingOrder) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "first": *m = First diff --git a/typedapi/types/enums/month/month.go b/typedapi/types/enums/month/month.go old mode 100755 new mode 100644 index ef1f2b0cff..88689f29ad --- a/typedapi/types/enums/month/month.go +++ b/typedapi/types/enums/month/month.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package month package month import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L70-L83 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L70-L83 type Month struct { Name string } @@ -59,7 +59,7 @@ func (m Month) MarshalText() (text []byte, err error) { } func (m *Month) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "january": *m = January diff --git a/typedapi/types/enums/multivaluemode/multivaluemode.go b/typedapi/types/enums/multivaluemode/multivaluemode.go old mode 100755 new mode 100644 index 5e47e57da1..ee58137f98 --- a/typedapi/types/enums/multivaluemode/multivaluemode.go +++ b/typedapi/types/enums/multivaluemode/multivaluemode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package multivaluemode package multivaluemode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L160-L165 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L160-L165 type MultiValueMode struct { Name string } @@ -43,7 +43,7 @@ func (m MultiValueMode) MarshalText() (text []byte, err error) { } func (m *MultiValueMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "min": *m = Min diff --git a/typedapi/types/enums/noderole/noderole.go b/typedapi/types/enums/noderole/noderole.go old mode 100755 new mode 100644 index 0eae5af87a..1b9707a021 --- a/typedapi/types/enums/noderole/noderole.go +++ b/typedapi/types/enums/noderole/noderole.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package noderole package noderole import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Node.ts#L76-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Node.ts#L76-L94 type NodeRole struct { Name string } @@ -63,7 +63,7 @@ func (n NodeRole) MarshalText() (text []byte, err error) { } func (n *NodeRole) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "master": *n = Master diff --git a/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go b/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go old mode 100755 new mode 100644 index 7c42a6c4ae..6ab29ce533 --- a/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go +++ b/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package noridecompoundmode package noridecompoundmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L74-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L74-L78 type NoriDecompoundMode struct { Name string } @@ -41,7 +41,7 @@ func (n NoriDecompoundMode) MarshalText() (text []byte, err error) { } func (n *NoriDecompoundMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "discard": *n = Discard diff --git a/typedapi/types/enums/normalization/normalization.go b/typedapi/types/enums/normalization/normalization.go old mode 100755 new mode 100644 index e05b6d5ff7..45a2bee999 --- a/typedapi/types/enums/normalization/normalization.go +++ b/typedapi/types/enums/normalization/normalization.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package normalization package normalization import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Similarity.ts#L52-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Similarity.ts#L52-L58 type Normalization struct { Name string } @@ -45,7 +45,7 @@ func (n Normalization) MarshalText() (text []byte, err error) { } func (n *Normalization) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "no": *n = No diff --git a/typedapi/types/enums/normalizemethod/normalizemethod.go b/typedapi/types/enums/normalizemethod/normalizemethod.go old mode 100755 new mode 100644 index 5579ab7442..907a6b5b4a --- a/typedapi/types/enums/normalizemethod/normalizemethod.go +++ b/typedapi/types/enums/normalizemethod/normalizemethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package normalizemethod package normalizemethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L266-L274 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L266-L274 type NormalizeMethod struct { Name string } @@ -47,7 +47,7 @@ func (n NormalizeMethod) MarshalText() (text []byte, err error) { } func (n *NormalizeMethod) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "rescale_0_1": *n = Rescale01 diff --git a/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go b/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go old mode 100755 new mode 100644 index 5c2eec215f..1af0c2344f --- a/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go +++ b/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package numericfielddataformat package numericfielddataformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/NumericFielddataFormat.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/NumericFielddataFormat.ts#L20-L23 type NumericFielddataFormat struct { Name string } @@ -39,7 +39,7 @@ func (n NumericFielddataFormat) MarshalText() (text []byte, err error) { } func (n *NumericFielddataFormat) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "array": *n = Array diff --git a/typedapi/types/enums/onscripterror/onscripterror.go b/typedapi/types/enums/onscripterror/onscripterror.go old mode 100755 new mode 100644 index 4440160bee..cf475611fd --- a/typedapi/types/enums/onscripterror/onscripterror.go +++ b/typedapi/types/enums/onscripterror/onscripterror.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package onscripterror package onscripterror import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L126-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L126-L129 type OnScriptError struct { Name string } @@ -39,7 +39,7 @@ func (o OnScriptError) MarshalText() (text []byte, err error) { } func (o *OnScriptError) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "fail": *o = Fail diff --git a/typedapi/types/enums/operator/operator.go b/typedapi/types/enums/operator/operator.go old mode 100755 new mode 100644 index a520078643..d86e419496 --- a/typedapi/types/enums/operator/operator.go +++ b/typedapi/types/enums/operator/operator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package operator package operator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/Operator.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/Operator.ts#L22-L27 type Operator struct { Name string } @@ -39,7 +39,7 @@ func (o Operator) MarshalText() (text []byte, err error) { } func (o *Operator) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "and": *o = And diff --git a/typedapi/types/enums/optype/optype.go b/typedapi/types/enums/optype/optype.go old mode 100755 new mode 100644 index 39ccec6839..88ff45a047 --- a/typedapi/types/enums/optype/optype.go +++ b/typedapi/types/enums/optype/optype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package optype package optype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L235-L238 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L235-L238 type OpType struct { Name string } @@ -39,7 +39,7 @@ func (o OpType) MarshalText() (text []byte, err error) { } func (o *OpType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "index": *o = Index diff --git a/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go b/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go old mode 100755 new mode 100644 index 1daa15e79e..cbe2a0e6b1 --- a/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go +++ b/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package pagerdutycontexttype package pagerdutycontexttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L67-L70 type PagerDutyContextType struct { Name string } @@ -39,7 +39,7 @@ func (p PagerDutyContextType) MarshalText() (text []byte, err error) { } func (p *PagerDutyContextType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "link": *p = Link diff --git a/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go b/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go old mode 100755 new mode 100644 index 23e54026ab..f2e8d79fe3 --- a/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go +++ b/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package pagerdutyeventtype package pagerdutyeventtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L72-L76 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L72-L76 type PagerDutyEventType struct { Name string } @@ -41,7 +41,7 @@ func (p PagerDutyEventType) MarshalText() (text []byte, err error) { } func (p *PagerDutyEventType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "trigger": *p = Trigger diff --git a/typedapi/types/enums/phoneticencoder/phoneticencoder.go b/typedapi/types/enums/phoneticencoder/phoneticencoder.go old mode 100755 new mode 100644 index 49e5edab57..b0456c091a --- a/typedapi/types/enums/phoneticencoder/phoneticencoder.go +++ b/typedapi/types/enums/phoneticencoder/phoneticencoder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package phoneticencoder package phoneticencoder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/phonetic-plugin.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/phonetic-plugin.ts#L23-L36 type PhoneticEncoder struct { Name string } @@ -59,7 +59,7 @@ func (p PhoneticEncoder) MarshalText() (text []byte, err error) { } func (p *PhoneticEncoder) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "metaphone": *p = Metaphone diff --git a/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go b/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go old mode 100755 new mode 100644 index 10c221a19e..900e2c51b5 --- a/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go +++ b/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package phoneticlanguage package phoneticlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/phonetic-plugin.ts#L38-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/phonetic-plugin.ts#L38-L51 type PhoneticLanguage struct { Name string } @@ -59,7 +59,7 @@ func (p PhoneticLanguage) MarshalText() (text []byte, err error) { } func (p *PhoneticLanguage) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "any": *p = Any diff --git a/typedapi/types/enums/phoneticnametype/phoneticnametype.go b/typedapi/types/enums/phoneticnametype/phoneticnametype.go old mode 100755 new mode 100644 index b736974d3a..6b1a446119 --- a/typedapi/types/enums/phoneticnametype/phoneticnametype.go +++ b/typedapi/types/enums/phoneticnametype/phoneticnametype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package phoneticnametype package phoneticnametype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/phonetic-plugin.ts#L53-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/phonetic-plugin.ts#L53-L57 type PhoneticNameType struct { Name string } @@ -41,7 +41,7 @@ func (p PhoneticNameType) MarshalText() (text []byte, err error) { } func (p *PhoneticNameType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "generic": *p = Generic diff --git a/typedapi/types/enums/phoneticruletype/phoneticruletype.go b/typedapi/types/enums/phoneticruletype/phoneticruletype.go old mode 100755 new mode 100644 index ddb1ff5861..d160038832 --- a/typedapi/types/enums/phoneticruletype/phoneticruletype.go +++ b/typedapi/types/enums/phoneticruletype/phoneticruletype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package phoneticruletype package phoneticruletype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/phonetic-plugin.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/phonetic-plugin.ts#L59-L62 type PhoneticRuleType struct { Name string } @@ -39,7 +39,7 @@ func (p PhoneticRuleType) MarshalText() (text []byte, err error) { } func (p *PhoneticRuleType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "approx": *p = Approx diff --git a/typedapi/types/enums/policytype/policytype.go b/typedapi/types/enums/policytype/policytype.go old mode 100755 new mode 100644 index c8d052b740..dee63271fe --- a/typedapi/types/enums/policytype/policytype.go +++ b/typedapi/types/enums/policytype/policytype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package policytype package policytype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/_types/Policy.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/_types/Policy.ts#L27-L31 type PolicyType struct { Name string } @@ -41,7 +41,7 @@ func (p PolicyType) MarshalText() (text []byte, err error) { } func (p *PolicyType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "geo_match": *p = Geomatch diff --git a/typedapi/types/enums/quantifier/quantifier.go b/typedapi/types/enums/quantifier/quantifier.go old mode 100755 new mode 100644 index 69b3e06067..26418a81f7 --- a/typedapi/types/enums/quantifier/quantifier.go +++ b/typedapi/types/enums/quantifier/quantifier.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package quantifier package quantifier import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L71-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L71-L74 type Quantifier struct { Name string } @@ -39,7 +39,7 @@ func (q Quantifier) MarshalText() (text []byte, err error) { } func (q *Quantifier) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "some": *q = Some diff --git a/typedapi/types/enums/rangerelation/rangerelation.go b/typedapi/types/enums/rangerelation/rangerelation.go old mode 100755 new mode 100644 index 4e711b8615..7d825ff8ec --- a/typedapi/types/enums/rangerelation/rangerelation.go +++ b/typedapi/types/enums/rangerelation/rangerelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package rangerelation package rangerelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L96-L100 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L96-L100 type RangeRelation struct { Name string } @@ -41,7 +41,7 @@ func (r RangeRelation) MarshalText() (text []byte, err error) { } func (r *RangeRelation) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "within": *r = Within diff --git a/typedapi/types/enums/ratemode/ratemode.go b/typedapi/types/enums/ratemode/ratemode.go old mode 100755 new mode 100644 index 82aef1a1c5..760d9f55af --- a/typedapi/types/enums/ratemode/ratemode.go +++ b/typedapi/types/enums/ratemode/ratemode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package ratemode package ratemode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L132-L135 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L132-L135 type RateMode struct { Name string } @@ -39,7 +39,7 @@ func (r RateMode) MarshalText() (text []byte, err error) { } func (r *RateMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "sum": *r = Sum diff --git a/typedapi/types/enums/refresh/refresh.go b/typedapi/types/enums/refresh/refresh.go old mode 100755 new mode 100644 index 418ceab649..6046a7aa45 --- a/typedapi/types/enums/refresh/refresh.go +++ b/typedapi/types/enums/refresh/refresh.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package refresh package refresh import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L240-L247 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L240-L247 type Refresh struct { Name string } @@ -36,12 +36,16 @@ var ( Waitfor = Refresh{"wait_for"} ) +func (r *Refresh) UnmarshalJSON(data []byte) error { + return r.UnmarshalText(data) +} + func (r Refresh) MarshalText() (text []byte, err error) { return []byte(r.String()), nil } func (r *Refresh) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "true": *r = True diff --git a/typedapi/types/enums/responsecontenttype/responsecontenttype.go b/typedapi/types/enums/responsecontenttype/responsecontenttype.go old mode 100755 new mode 100644 index 9affe27c9e..f4d8e2f40b --- a/typedapi/types/enums/responsecontenttype/responsecontenttype.go +++ b/typedapi/types/enums/responsecontenttype/responsecontenttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
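Refresh is the one enum in this batch that also gains an UnmarshalJSON method, which simply forwards the raw bytes to UnmarshalText; together with the quote stripping above, that lets a Refresh value be decoded straight from a JSON string token. A small usage sketch (the import path is inferred from the file location in this diff):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/refresh"
)

func main() {
	// json.Unmarshal hands UnmarshalJSON the raw token `"wait_for"`,
	// quotes included; UnmarshalText strips them and matches the variant.
	var r refresh.Refresh
	if err := json.Unmarshal([]byte(`"wait_for"`), &r); err != nil {
		panic(err)
	}
	fmt.Println(r == refresh.Waitfor) // true
}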
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package responsecontenttype package responsecontenttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L106-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L106-L110 type ResponseContentType struct { Name string } @@ -41,7 +41,7 @@ func (r ResponseContentType) MarshalText() (text []byte, err error) { } func (r *ResponseContentType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "json": *r = Json diff --git a/typedapi/types/enums/result/result.go b/typedapi/types/enums/result/result.go old mode 100755 new mode 100644 index 816d52adf3..8fbfec2887 --- a/typedapi/types/enums/result/result.go +++ b/typedapi/types/enums/result/result.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package result package result import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Result.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Result.ts#L20-L27 type Result struct { Name string } @@ -45,7 +45,7 @@ func (r Result) MarshalText() (text []byte, err error) { } func (r *Result) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "created": *r = Created diff --git a/typedapi/types/enums/resultposition/resultposition.go b/typedapi/types/enums/resultposition/resultposition.go old mode 100755 new mode 100644 index c9471dedfe..b9a41c9e94 --- a/typedapi/types/enums/resultposition/resultposition.go +++ b/typedapi/types/enums/resultposition/resultposition.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package resultposition package resultposition import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/search/types.ts#L20-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/search/types.ts#L20-L32 type ResultPosition struct { Name string } @@ -39,7 +39,7 @@ func (r ResultPosition) MarshalText() (text []byte, err error) { } func (r *ResultPosition) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "tail": *r = Tail diff --git a/typedapi/types/enums/routingstate/routingstate.go b/typedapi/types/enums/routingstate/routingstate.go old mode 100755 new mode 100644 index 3e6ecc5a99..bd2475d5b3 --- a/typedapi/types/enums/routingstate/routingstate.go +++ b/typedapi/types/enums/routingstate/routingstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package routingstate package routingstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L335-L356 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L341-L362 type RoutingState struct { Name string } @@ -45,7 +45,7 @@ func (r RoutingState) MarshalText() (text []byte, err error) { } func (r *RoutingState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "failed": *r = Failed diff --git a/typedapi/types/enums/ruleaction/ruleaction.go b/typedapi/types/enums/ruleaction/ruleaction.go old mode 100755 new mode 100644 index 2a76ce8a9a..a47602dcd7 --- a/typedapi/types/enums/ruleaction/ruleaction.go +++ b/typedapi/types/enums/ruleaction/ruleaction.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package ruleaction package ruleaction import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Rule.ts#L41-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Rule.ts#L41-L50 type RuleAction struct { Name string } @@ -39,7 +39,7 @@ func (r RuleAction) MarshalText() (text []byte, err error) { } func (r *RuleAction) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "skip_result": *r = Skipresult diff --git a/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go b/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go old mode 100755 new mode 100644 index 7a4af32ff3..cef5f97bab --- a/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go +++ b/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package runtimefieldtype package runtimefieldtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/RuntimeFields.ts#L46-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/RuntimeFields.ts#L46-L55 type RuntimeFieldType struct { Name string } @@ -51,7 +51,7 @@ func (r RuntimeFieldType) MarshalText() (text []byte, err error) { } func (r *RuntimeFieldType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "boolean": *r = Boolean diff --git a/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go b/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go old mode 100755 new mode 100644 index fdb8b4fc26..c01ad28351 --- a/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go +++ b/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package sampleraggregationexecutionhint package sampleraggregationexecutionhint import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L163-L167 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L163-L167 type SamplerAggregationExecutionHint struct { Name string } @@ -41,7 +41,7 @@ func (s SamplerAggregationExecutionHint) MarshalText() (text []byte, err error) } func (s *SamplerAggregationExecutionHint) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "map": *s = Map diff --git a/typedapi/types/enums/scoremode/scoremode.go b/typedapi/types/enums/scoremode/scoremode.go old mode 100755 new mode 100644 index e740f594e2..162815e76b --- a/typedapi/types/enums/scoremode/scoremode.go +++ b/typedapi/types/enums/scoremode/scoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package scoremode package scoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/rescoring.ts#L36-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/rescoring.ts#L36-L42 type ScoreMode struct { Name string } @@ -45,7 +45,7 @@ func (s ScoreMode) MarshalText() (text []byte, err error) { } func (s *ScoreMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "avg": *s = Avg diff --git a/typedapi/types/enums/scriptlanguage/scriptlanguage.go b/typedapi/types/enums/scriptlanguage/scriptlanguage.go old mode 100755 new mode 100644 index 30b6799a86..f59be9c7f6 --- a/typedapi/types/enums/scriptlanguage/scriptlanguage.go +++ b/typedapi/types/enums/scriptlanguage/scriptlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package scriptlanguage package scriptlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Scripting.ts#L24-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Scripting.ts#L24-L33 type ScriptLanguage struct { Name string } @@ -43,7 +43,7 @@ func (s ScriptLanguage) MarshalText() (text []byte, err error) { } func (s *ScriptLanguage) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "painless": *s = Painless diff --git a/typedapi/types/enums/scriptsorttype/scriptsorttype.go b/typedapi/types/enums/scriptsorttype/scriptsorttype.go old mode 100755 new mode 100644 index d6eace3c80..7a774e0ad6 --- a/typedapi/types/enums/scriptsorttype/scriptsorttype.go +++ b/typedapi/types/enums/scriptsorttype/scriptsorttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package scriptsorttype package scriptsorttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L76-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L76-L80 type ScriptSortType struct { Name string } @@ -41,7 +41,7 @@ func (s ScriptSortType) MarshalText() (text []byte, err error) { } func (s *ScriptSortType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "string": *s = String diff --git a/typedapi/types/enums/searchtype/searchtype.go b/typedapi/types/enums/searchtype/searchtype.go old mode 100755 new mode 100644 index 26f8d3b61d..e58783a5d2 --- a/typedapi/types/enums/searchtype/searchtype.go +++ b/typedapi/types/enums/searchtype/searchtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package searchtype package searchtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L249-L254 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L249-L254 type SearchType struct { Name string } @@ -39,7 +39,7 @@ func (s SearchType) MarshalText() (text []byte, err error) { } func (s *SearchType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "query_then_fetch": *s = Querythenfetch diff --git a/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go b/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go old mode 100755 new mode 100644 index c6f4af4ff8..a08af3f90e --- a/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go +++ b/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package segmentsortmissing package segmentsortmissing import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSegmentSort.ts#L43-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSegmentSort.ts#L43-L48 type SegmentSortMissing struct { Name string } @@ -39,7 +39,7 @@ func (s SegmentSortMissing) MarshalText() (text []byte, err error) { } func (s *SegmentSortMissing) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "_last": *s = Last diff --git a/typedapi/types/enums/segmentsortmode/segmentsortmode.go b/typedapi/types/enums/segmentsortmode/segmentsortmode.go old mode 100755 new mode 100644 index e52b6710a0..51aa3edd4f --- a/typedapi/types/enums/segmentsortmode/segmentsortmode.go +++ b/typedapi/types/enums/segmentsortmode/segmentsortmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package segmentsortmode package segmentsortmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSegmentSort.ts#L36-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSegmentSort.ts#L36-L41 type SegmentSortMode struct { Name string } @@ -39,7 +39,7 @@ func (s SegmentSortMode) MarshalText() (text []byte, err error) { } func (s *SegmentSortMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "min": *s = Min diff --git a/typedapi/types/enums/segmentsortorder/segmentsortorder.go b/typedapi/types/enums/segmentsortorder/segmentsortorder.go old mode 100755 new mode 100644 index 736c409406..5f686ea043 --- a/typedapi/types/enums/segmentsortorder/segmentsortorder.go +++ b/typedapi/types/enums/segmentsortorder/segmentsortorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package segmentsortorder package segmentsortorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSegmentSort.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSegmentSort.ts#L29-L34 type SegmentSortOrder struct { Name string } @@ -39,7 +39,7 @@ func (s SegmentSortOrder) MarshalText() (text []byte, err error) { } func (s *SegmentSortOrder) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "asc": *s = Asc diff --git a/typedapi/types/enums/shapetype/shapetype.go b/typedapi/types/enums/shapetype/shapetype.go old mode 100755 new mode 100644 index 42b0e4f783..935b1e70a0 --- a/typedapi/types/enums/shapetype/shapetype.go +++ b/typedapi/types/enums/shapetype/shapetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shapetype package shapetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L343-L346 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L343-L346 type ShapeType struct { Name string } @@ -39,7 +39,7 @@ func (s ShapeType) MarshalText() (text []byte, err error) { } func (s *ShapeType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "geo_shape": *s = Geoshape diff --git a/typedapi/types/enums/shardroutingstate/shardroutingstate.go b/typedapi/types/enums/shardroutingstate/shardroutingstate.go old mode 100755 new mode 100644 index e03ded5750..0ff4e15a6f --- a/typedapi/types/enums/shardroutingstate/shardroutingstate.go +++ b/typedapi/types/enums/shardroutingstate/shardroutingstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shardroutingstate package shardroutingstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L160-L165 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L160-L165 type ShardRoutingState struct { Name string } @@ -43,7 +43,7 @@ func (s ShardRoutingState) MarshalText() (text []byte, err error) { } func (s *ShardRoutingState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "UNASSIGNED": *s = UNASSIGNED diff --git a/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go b/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go old mode 100755 new mode 100644 index 49b69354b6..8774184baa --- a/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go +++ b/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shardsstatsstage package shardsstatsstage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotShardsStatsStage.ts#L20-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotShardsStatsStage.ts#L20-L31 type ShardsStatsStage struct { Name string } @@ -45,7 +45,7 @@ func (s ShardsStatsStage) MarshalText() (text []byte, err error) { } func (s *ShardsStatsStage) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "DONE": *s = DONE diff --git a/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go b/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go old mode 100755 new mode 100644 index c0a8d723ec..ea43a3eebe --- a/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go +++ b/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shardstoreallocation package shardstoreallocation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L45-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L45-L49 type ShardStoreAllocation struct { Name string } @@ -41,7 +41,7 @@ func (s ShardStoreAllocation) MarshalText() (text []byte, err error) { } func (s *ShardStoreAllocation) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "primary": *s = Primary diff --git a/typedapi/types/enums/shardstorestatus/shardstorestatus.go b/typedapi/types/enums/shardstorestatus/shardstorestatus.go old mode 100755 new mode 100644 index 7a3f092071..0f2c9c20d0 --- a/typedapi/types/enums/shardstorestatus/shardstorestatus.go +++ b/typedapi/types/enums/shardstorestatus/shardstorestatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shardstorestatus package shardstorestatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L60-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L60-L69 type ShardStoreStatus struct { Name string } @@ -43,7 +43,7 @@ func (s ShardStoreStatus) MarshalText() (text []byte, err error) { } func (s *ShardStoreStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "green": *s = Green diff --git a/typedapi/types/enums/shutdownstatus/shutdownstatus.go b/typedapi/types/enums/shutdownstatus/shutdownstatus.go old mode 100755 new mode 100644 index 102868c771..965b5320b8 --- a/typedapi/types/enums/shutdownstatus/shutdownstatus.go +++ b/typedapi/types/enums/shutdownstatus/shutdownstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shutdownstatus package shutdownstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L45-L50 type ShutdownStatus struct { Name string } @@ -43,7 +43,7 @@ func (s ShutdownStatus) MarshalText() (text []byte, err error) { } func (s *ShutdownStatus) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "not_started": *s = Notstarted diff --git a/typedapi/types/enums/shutdowntype/shutdowntype.go b/typedapi/types/enums/shutdowntype/shutdowntype.go old mode 100755 new mode 100644 index 9b5cf2e084..62cc18d43e --- a/typedapi/types/enums/shutdowntype/shutdowntype.go +++ b/typedapi/types/enums/shutdowntype/shutdowntype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package shutdowntype package shutdowntype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L40-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L40-L43 type ShutdownType struct { Name string } @@ -39,7 +39,7 @@ func (s ShutdownType) MarshalText() (text []byte, err error) { } func (s *ShutdownType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "remove": *s = Remove diff --git a/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go b/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go old mode 100755 new mode 100644 index 38b2cdbd35..974d18863d --- a/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go +++ b/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package simplequerystringflag package simplequerystringflag import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L278-L292 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L278-L292 type SimpleQueryStringFlag struct { Name string } @@ -61,7 +61,7 @@ func (s SimpleQueryStringFlag) MarshalText() (text []byte, err error) { } func (s *SimpleQueryStringFlag) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "NONE": *s = NONE diff --git a/typedapi/types/enums/slicescalculation/slicescalculation.go b/typedapi/types/enums/slicescalculation/slicescalculation.go old mode 100755 new mode 100644 index ac02be2ee3..f19e1e892d --- a/typedapi/types/enums/slicescalculation/slicescalculation.go +++ b/typedapi/types/enums/slicescalculation/slicescalculation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package slicescalculation package slicescalculation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L333-L341 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L333-L341 type SlicesCalculation struct { Name string } @@ -37,7 +37,7 @@ func (s SlicesCalculation) MarshalText() (text []byte, err error) { } func (s *SlicesCalculation) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "auto": *s = Auto diff --git a/typedapi/types/enums/snapshotsort/snapshotsort.go b/typedapi/types/enums/snapshotsort/snapshotsort.go old mode 100755 new mode 100644 index 0ca26ac279..4987f07b91 --- a/typedapi/types/enums/snapshotsort/snapshotsort.go +++ b/typedapi/types/enums/snapshotsort/snapshotsort.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package snapshotsort package snapshotsort import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotInfo.ts#L67-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotInfo.ts#L67-L78 type SnapshotSort struct { Name string } @@ -49,7 +49,7 @@ func (s SnapshotSort) MarshalText() (text []byte, err error) { } func (s *SnapshotSort) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "start_time": *s = Starttime diff --git a/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go b/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go old mode 100755 new mode 100644 index f9a804f286..711e3475f4 --- a/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go +++ b/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package snapshotupgradestate package snapshotupgradestate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Model.ts#L91-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Model.ts#L91-L96 type SnapshotUpgradeState struct { Name string } @@ -43,7 +43,7 @@ func (s SnapshotUpgradeState) MarshalText() (text []byte, err error) { } func (s *SnapshotUpgradeState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "loading_old_state": *s = Loadingoldstate diff --git a/typedapi/types/enums/snowballlanguage/snowballlanguage.go b/typedapi/types/enums/snowballlanguage/snowballlanguage.go old mode 100755 new mode 100644 index 9914bc807f..e219f9a0aa --- a/typedapi/types/enums/snowballlanguage/snowballlanguage.go +++ b/typedapi/types/enums/snowballlanguage/snowballlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package snowballlanguage package snowballlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/languages.ts#L57-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/languages.ts#L57-L80 type SnowballLanguage struct { Name string } @@ -79,7 +79,7 @@ func (s SnowballLanguage) MarshalText() (text []byte, err error) { } func (s *SnowballLanguage) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "Armenian": *s = Armenian diff --git a/typedapi/types/enums/sortmode/sortmode.go b/typedapi/types/enums/sortmode/sortmode.go old mode 100755 new mode 100644 index 0fa2f95485..e7ca216a02 --- a/typedapi/types/enums/sortmode/sortmode.go +++ b/typedapi/types/enums/sortmode/sortmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package sortmode package sortmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L103-L112 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L103-L112 type SortMode struct { Name string } @@ -45,7 +45,7 @@ func (s SortMode) MarshalText() (text []byte, err error) { } func (s *SortMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "min": *s = Min diff --git a/typedapi/types/enums/sortorder/sortorder.go b/typedapi/types/enums/sortorder/sortorder.go old mode 100755 new mode 100644 index c890e9f556..55d72bbed5 --- a/typedapi/types/enums/sortorder/sortorder.go +++ b/typedapi/types/enums/sortorder/sortorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package sortorder package sortorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L114-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L114-L117 type SortOrder struct { Name string } @@ -39,7 +39,7 @@ func (s SortOrder) MarshalText() (text []byte, err error) { } func (s *SortOrder) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "asc": *s = Asc diff --git a/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go b/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go old mode 100755 new mode 100644 index 79f6f5c3f4..c254aa167f --- a/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go +++ b/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
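MarshalText is left untouched throughout, so enums still serialise to their canonical names and round-trip cleanly through encoding/json; note that the standard decoder unquotes a string before calling UnmarshalText, so that path worked before this change too — the added quote stripping matters for callers that pass raw tokens, as Refresh's new UnmarshalJSON does. A hedged round-trip sketch using sortorder (the sortSpec wrapper is hypothetical and the import path is inferred):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortorder"
)

// sortSpec is an illustrative wrapper, not a library type; it only exists to
// show the enum passing through encoding/json via MarshalText/UnmarshalText.
type sortSpec struct {
	Order sortorder.SortOrder `json:"order"`
}

func main() {
	out, err := json.Marshal(sortSpec{Order: sortorder.Asc})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"order":"asc"}

	var in sortSpec
	if err := json.Unmarshal(out, &in); err != nil {
		panic(err)
	}
	fmt.Println(in.Order == sortorder.Asc) // true
}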
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package sourcefieldmode package sourcefieldmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L67-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L67-L75 type SourceFieldMode struct { Name string } @@ -41,7 +41,7 @@ func (s SourceFieldMode) MarshalText() (text []byte, err error) { } func (s *SourceFieldMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "disabled": *s = Disabled diff --git a/typedapi/types/enums/statslevel/statslevel.go b/typedapi/types/enums/statslevel/statslevel.go old mode 100755 new mode 100644 index 597bd6c02b..8148dafaa9 --- a/typedapi/types/enums/statslevel/statslevel.go +++ b/typedapi/types/enums/statslevel/statslevel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package statslevel package statslevel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/_types/stats.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/_types/stats.ts#L20-L24 type StatsLevel struct { Name string } @@ -41,7 +41,7 @@ func (s StatsLevel) MarshalText() (text []byte, err error) { } func (s *StatsLevel) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "cluster": *s = Cluster diff --git a/typedapi/types/enums/storagetype/storagetype.go b/typedapi/types/enums/storagetype/storagetype.go old mode 100755 new mode 100644 index 53e7587107..518119d352 --- a/typedapi/types/enums/storagetype/storagetype.go +++ b/typedapi/types/enums/storagetype/storagetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package storagetype package storagetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L508-L538 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L508-L538 type StorageType struct { Name string } @@ -43,7 +43,7 @@ func (s StorageType) MarshalText() (text []byte, err error) { } func (s *StorageType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "fs": *s = Fs diff --git a/typedapi/types/enums/stringdistance/stringdistance.go b/typedapi/types/enums/stringdistance/stringdistance.go old mode 100755 new mode 100644 index 295d3338e1..1d2297e04f --- a/typedapi/types/enums/stringdistance/stringdistance.go +++ b/typedapi/types/enums/stringdistance/stringdistance.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package stringdistance package stringdistance import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L239-L245 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L243-L249 type StringDistance struct { Name string } @@ -45,7 +45,7 @@ func (s StringDistance) MarshalText() (text []byte, err error) { } func (s *StringDistance) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "internal": *s = Internal diff --git a/typedapi/types/enums/suggestmode/suggestmode.go b/typedapi/types/enums/suggestmode/suggestmode.go old mode 100755 new mode 100644 index a90c596a24..bb736fa7a2 --- a/typedapi/types/enums/suggestmode/suggestmode.go +++ b/typedapi/types/enums/suggestmode/suggestmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package suggestmode package suggestmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L256-L260 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L256-L260 type SuggestMode struct { Name string } @@ -41,7 +41,7 @@ func (s SuggestMode) MarshalText() (text []byte, err error) { } func (s *SuggestMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "missing": *s = Missing diff --git a/typedapi/types/enums/suggestsort/suggestsort.go b/typedapi/types/enums/suggestsort/suggestsort.go old mode 100755 new mode 100644 index 1c396ab443..67439892ed --- a/typedapi/types/enums/suggestsort/suggestsort.go +++ b/typedapi/types/enums/suggestsort/suggestsort.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package suggestsort package suggestsort import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L247-L250 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L251-L254 type SuggestSort struct { Name string } @@ -39,7 +39,7 @@ func (s SuggestSort) MarshalText() (text []byte, err error) { } func (s *SuggestSort) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "score": *s = Score diff --git a/typedapi/types/enums/synonymformat/synonymformat.go b/typedapi/types/enums/synonymformat/synonymformat.go old mode 100755 new mode 100644 index f3537d5400..a3fe7822d5 --- a/typedapi/types/enums/synonymformat/synonymformat.go +++ b/typedapi/types/enums/synonymformat/synonymformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package synonymformat package synonymformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L104-L107 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L105-L108 type SynonymFormat struct { Name string } @@ -39,7 +39,7 @@ func (s SynonymFormat) MarshalText() (text []byte, err error) { } func (s *SynonymFormat) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "solr": *s = Solr diff --git a/typedapi/types/enums/templateformat/templateformat.go b/typedapi/types/enums/templateformat/templateformat.go old mode 100755 new mode 100644 index fe2c137734..cd3baa20e6 --- a/typedapi/types/enums/templateformat/templateformat.go +++ b/typedapi/types/enums/templateformat/templateformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package templateformat package templateformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_role/types.ts#L41-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_role/types.ts#L41-L44 type TemplateFormat struct { Name string } @@ -39,7 +39,7 @@ func (t TemplateFormat) MarshalText() (text []byte, err error) { } func (t *TemplateFormat) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "string": *t = String diff --git a/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go b/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go old mode 100755 new mode 100644 index 5c8f7dc6a2..50fafaf923 --- a/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go +++ b/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package termsaggregationcollectmode package termsaggregationcollectmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L407-L410 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L407-L410 type TermsAggregationCollectMode struct { Name string } @@ -39,7 +39,7 @@ func (t TermsAggregationCollectMode) MarshalText() (text []byte, err error) { } func (t *TermsAggregationCollectMode) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "depth_first": *t = Depthfirst diff --git a/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go b/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go old mode 100755 new mode 100644 index 48e1953159..52df5ad397 --- a/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go +++ b/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package termsaggregationexecutionhint package termsaggregationexecutionhint import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L412-L417 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L412-L417 type TermsAggregationExecutionHint struct { Name string } @@ -43,7 +43,7 @@ func (t TermsAggregationExecutionHint) MarshalText() (text []byte, err error) { } func (t *TermsAggregationExecutionHint) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "map": *t = Map diff --git a/typedapi/types/enums/termvectoroption/termvectoroption.go b/typedapi/types/enums/termvectoroption/termvectoroption.go old mode 100755 new mode 100644 index 560c2ecbb5..25c577fd29 --- a/typedapi/types/enums/termvectoroption/termvectoroption.go +++ b/typedapi/types/enums/termvectoroption/termvectoroption.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package termvectoroption package termvectoroption import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/TermVectorOption.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/TermVectorOption.ts#L20-L28 type TermVectorOption struct { Name string } @@ -49,7 +49,7 @@ func (t TermVectorOption) MarshalText() (text []byte, err error) { } func (t *TermVectorOption) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "no": *t = No diff --git a/typedapi/types/enums/textquerytype/textquerytype.go b/typedapi/types/enums/textquerytype/textquerytype.go old mode 100755 new mode 100644 index e0825a57d4..455e8d24a0 --- a/typedapi/types/enums/textquerytype/textquerytype.go +++ b/typedapi/types/enums/textquerytype/textquerytype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package textquerytype package textquerytype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L219-L226 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L219-L226 type TextQueryType struct { Name string } @@ -47,7 +47,7 @@ func (t TextQueryType) MarshalText() (text []byte, err error) { } func (t *TextQueryType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "best_fields": *t = Bestfields diff --git a/typedapi/types/enums/threadtype/threadtype.go b/typedapi/types/enums/threadtype/threadtype.go old mode 100755 new mode 100644 index 6492454ca0..63f1be5e71 --- a/typedapi/types/enums/threadtype/threadtype.go +++ b/typedapi/types/enums/threadtype/threadtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package threadtype package threadtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L262-L268 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L262-L268 type ThreadType struct { Name string } @@ -45,7 +45,7 @@ func (t ThreadType) MarshalText() (text []byte, err error) { } func (t *ThreadType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "cpu": *t = Cpu diff --git a/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go b/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go old mode 100755 new mode 100644 index 8392a5fb7b..f44f51e535 --- a/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go +++ b/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package timeseriesmetrictype package timeseriesmetrictype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/TimeSeriesMetricType.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/TimeSeriesMetricType.ts#L20-L25 type TimeSeriesMetricType struct { Name string } @@ -43,7 +43,7 @@ func (t TimeSeriesMetricType) MarshalText() (text []byte, err error) { } func (t *TimeSeriesMetricType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "gauge": *t = Gauge diff --git a/typedapi/types/enums/timeunit/timeunit.go b/typedapi/types/enums/timeunit/timeunit.go old mode 100755 new mode 100644 index 1360af90f5..b93745b15e --- a/typedapi/types/enums/timeunit/timeunit.go +++ b/typedapi/types/enums/timeunit/timeunit.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package timeunit package timeunit import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L69-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L69-L84 type TimeUnit struct { Name string } @@ -49,7 +49,7 @@ func (t TimeUnit) MarshalText() (text []byte, err error) { } func (t *TimeUnit) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "nanos": *t = Nanos diff --git a/typedapi/types/enums/tokenchar/tokenchar.go b/typedapi/types/enums/tokenchar/tokenchar.go old mode 100755 new mode 100644 index bbc3336203..ca2985aa08 --- a/typedapi/types/enums/tokenchar/tokenchar.go +++ b/typedapi/types/enums/tokenchar/tokenchar.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package tokenchar package tokenchar import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L46-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L46-L53 type TokenChar struct { Name string } @@ -47,7 +47,7 @@ func (t TokenChar) MarshalText() (text []byte, err error) { } func (t *TokenChar) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "letter": *t = Letter diff --git a/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go b/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go old mode 100755 new mode 100644 index 0821523d89..5146b6e80a --- a/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go +++ b/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package tokenizationtruncate package tokenizationtruncate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L315-L319 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L315-L319 type TokenizationTruncate struct { Name string } @@ -41,7 +41,7 @@ func (t TokenizationTruncate) MarshalText() (text []byte, err error) { } func (t *TokenizationTruncate) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "first": *t = First diff --git a/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go b/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go old mode 100755 new mode 100644 index 1fd4375954..a20ffac1ea --- a/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go +++ b/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package totalhitsrelation package totalhitsrelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L99-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L99-L104 type TotalHitsRelation struct { Name string } @@ -39,7 +39,7 @@ func (t TotalHitsRelation) MarshalText() (text []byte, err error) { } func (t *TotalHitsRelation) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "eq": *t = Eq diff --git a/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go b/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go old mode 100755 new mode 100644 index e3c800c746..7cb086cd15 --- a/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go +++ b/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package trainedmodeltype package trainedmodeltype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L247-L261 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L249-L263 type TrainedModelType struct { Name string } @@ -41,7 +41,7 @@ func (t TrainedModelType) MarshalText() (text []byte, err error) { } func (t *TrainedModelType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "tree_ensemble": *t = Treeensemble diff --git a/typedapi/types/enums/trainingpriority/trainingpriority.go b/typedapi/types/enums/trainingpriority/trainingpriority.go old mode 100755 new mode 100644 index 1ee2729bc9..0c2fce6a49 --- a/typedapi/types/enums/trainingpriority/trainingpriority.go +++ b/typedapi/types/enums/trainingpriority/trainingpriority.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package trainingpriority package trainingpriority import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L300-L303 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L302-L305 type TrainingPriority struct { Name string } @@ -39,7 +39,7 @@ func (t TrainingPriority) MarshalText() (text []byte, err error) { } func (t *TrainingPriority) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "normal": *t = Normal diff --git a/typedapi/types/enums/translogdurability/translogdurability.go b/typedapi/types/enums/translogdurability/translogdurability.go old mode 100755 new mode 100644 index 111295b9ae..88e16813f6 --- a/typedapi/types/enums/translogdurability/translogdurability.go +++ b/typedapi/types/enums/translogdurability/translogdurability.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package translogdurability package translogdurability import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L356-L371 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L356-L371 type TranslogDurability struct { Name string } @@ -39,7 +39,7 @@ func (t TranslogDurability) MarshalText() (text []byte, err error) { } func (t *TranslogDurability) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "request": *t = Request diff --git a/typedapi/types/enums/ttesttype/ttesttype.go b/typedapi/types/enums/ttesttype/ttesttype.go old mode 100755 new mode 100644 index fa77f25ee5..13bc681870 --- a/typedapi/types/enums/ttesttype/ttesttype.go +++ b/typedapi/types/enums/ttesttype/ttesttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package ttesttype package ttesttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L165-L169 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L165-L169 type TTestType struct { Name string } @@ -41,7 +41,7 @@ func (t TTestType) MarshalText() (text []byte, err error) { } func (t *TTestType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "paired": *t = Paired diff --git a/typedapi/types/enums/type_/type_.go b/typedapi/types/enums/type_/type_.go old mode 100755 new mode 100644 index 63b4440c82..8bfeebacb1 --- a/typedapi/types/enums/type_/type_.go +++ b/typedapi/types/enums/type_/type_.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package type_ package type_ import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/_types/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/_types/types.ts#L20-L24 type Type struct { Name string } @@ -41,7 +41,7 @@ func (t Type) MarshalText() (text []byte, err error) { } func (t *Type) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "restart": *t = Restart diff --git a/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go b/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go old mode 100755 new mode 100644 index 65672a06c1..cbc1c11c1e --- a/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go +++ b/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package unassignedinformationreason package unassignedinformationreason import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L127-L146 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L127-L146 type UnassignedInformationReason struct { Name string } @@ -65,7 +65,7 @@ func (u UnassignedInformationReason) MarshalText() (text []byte, err error) { } func (u *UnassignedInformationReason) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "INDEX_CREATED": *u = INDEXCREATED diff --git a/typedapi/types/enums/useragentproperty/useragentproperty.go b/typedapi/types/enums/useragentproperty/useragentproperty.go old mode 100755 new mode 100644 index ed10e39c9f..cea56b1155 --- a/typedapi/types/enums/useragentproperty/useragentproperty.go +++ b/typedapi/types/enums/useragentproperty/useragentproperty.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package useragentproperty package useragentproperty import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L77-L88 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L77-L88 type UserAgentProperty struct { Name string } @@ -55,7 +55,7 @@ func (u UserAgentProperty) MarshalText() (text []byte, err error) { } func (u *UserAgentProperty) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "NAME": *u = NAME diff --git a/typedapi/types/enums/valuetype/valuetype.go b/typedapi/types/enums/valuetype/valuetype.go old mode 100755 new mode 100644 index 50b2b569e8..eb983bf66a --- a/typedapi/types/enums/valuetype/valuetype.go +++ b/typedapi/types/enums/valuetype/valuetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package valuetype package valuetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L198-L209 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L198-L209 type ValueType struct { Name string } @@ -55,7 +55,7 @@ func (v ValueType) MarshalText() (text []byte, err error) { } func (v *ValueType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "string": *v = String diff --git a/typedapi/types/enums/versiontype/versiontype.go b/typedapi/types/enums/versiontype/versiontype.go old mode 100755 new mode 100644 index 81d01b5ad5..753d944cfb --- a/typedapi/types/enums/versiontype/versiontype.go +++ b/typedapi/types/enums/versiontype/versiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package versiontype package versiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L98-L103 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L98-L103 type VersionType struct { Name string } @@ -43,7 +43,7 @@ func (v VersionType) MarshalText() (text []byte, err error) { } func (v *VersionType) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "internal": *v = Internal diff --git a/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go b/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go old mode 100755 new mode 100644 index a931ad9424..9cb17b9afe --- a/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go +++ b/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package waitforactiveshardoptions package waitforactiveshardoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L270-L274 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L270-L274 type WaitForActiveShardOptions struct { Name string } @@ -39,7 +39,7 @@ func (w WaitForActiveShardOptions) MarshalText() (text []byte, err error) { } func (w *WaitForActiveShardOptions) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *w = All diff --git a/typedapi/types/enums/waitforevents/waitforevents.go b/typedapi/types/enums/waitforevents/waitforevents.go old mode 100755 new mode 100644 index 4285f63267..c79b20a030 --- a/typedapi/types/enums/waitforevents/waitforevents.go +++ b/typedapi/types/enums/waitforevents/waitforevents.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package waitforevents package waitforevents import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L276-L283 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L276-L283 type WaitForEvents struct { Name string } @@ -47,7 +47,7 @@ func (w WaitForEvents) MarshalText() (text []byte, err error) { } func (w *WaitForEvents) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "immediate": *w = Immediate diff --git a/typedapi/types/enums/watchermetric/watchermetric.go b/typedapi/types/enums/watchermetric/watchermetric.go old mode 100755 new mode 100644 index ceaa238da6..edb8a4979b --- a/typedapi/types/enums/watchermetric/watchermetric.go +++ b/typedapi/types/enums/watchermetric/watchermetric.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package watchermetric package watchermetric import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stats/types.ts#L42-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stats/types.ts#L42-L48 type WatcherMetric struct { Name string } @@ -43,7 +43,7 @@ func (w WatcherMetric) MarshalText() (text []byte, err error) { } func (w *WatcherMetric) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "_all": *w = All diff --git a/typedapi/types/enums/watcherstate/watcherstate.go b/typedapi/types/enums/watcherstate/watcherstate.go old mode 100755 new mode 100644 index 354d03fed7..c21944d357 --- a/typedapi/types/enums/watcherstate/watcherstate.go +++ b/typedapi/types/enums/watcherstate/watcherstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package watcherstate package watcherstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stats/types.ts#L26-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stats/types.ts#L26-L31 type WatcherState struct { Name string } @@ -43,7 +43,7 @@ func (w WatcherState) MarshalText() (text []byte, err error) { } func (w *WatcherState) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "stopped": *w = Stopped diff --git a/typedapi/types/enums/zerotermsquery/zerotermsquery.go b/typedapi/types/enums/zerotermsquery/zerotermsquery.go old mode 100755 new mode 100644 index 368e418324..3670d4e67d --- a/typedapi/types/enums/zerotermsquery/zerotermsquery.go +++ b/typedapi/types/enums/zerotermsquery/zerotermsquery.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Package zerotermsquery package zerotermsquery import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L228-L231 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L228-L231 type ZeroTermsQuery struct { Name string } @@ -39,7 +39,7 @@ func (z ZeroTermsQuery) MarshalText() (text []byte, err error) { } func (z *ZeroTermsQuery) UnmarshalText(text []byte) error { - switch strings.ToLower(string(text)) { + switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "all": *z = All diff --git a/typedapi/types/epochtimeunitmillis.go b/typedapi/types/epochtimeunitmillis.go old mode 100755 new mode 100644 index f53a47ede6..595df4d9e8 --- a/typedapi/types/epochtimeunitmillis.go +++ b/typedapi/types/epochtimeunitmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EpochTimeUnitMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L40-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L40-L40 type EpochTimeUnitMillis int64 diff --git a/typedapi/types/epochtimeunitseconds.go b/typedapi/types/epochtimeunitseconds.go old mode 100755 new mode 100644 index 3905f2eb46..23271b828b --- a/typedapi/types/epochtimeunitseconds.go +++ b/typedapi/types/epochtimeunitseconds.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EpochTimeUnitSeconds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Time.ts#L40-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Time.ts#L40-L40 type EpochTimeUnitSeconds int64 diff --git a/typedapi/types/eql.go b/typedapi/types/eql.go old mode 100755 new mode 100644 index b5fd467617..200bf3d2da --- a/typedapi/types/eql.go +++ b/typedapi/types/eql.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Eql type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L342-L345 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L342-L345 type Eql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -30,6 +40,67 @@ type Eql struct { Queries map[string]XpackQuery `json:"queries"` } +func (s *Eql) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "features": + if err := dec.Decode(&s.Features); err != nil { + return err + } + + case "queries": + if s.Queries == nil { + s.Queries = make(map[string]XpackQuery, 0) + } + if err := dec.Decode(&s.Queries); err != nil { + return err + } + + } + } + return nil +} + // NewEql returns a Eql. func NewEql() *Eql { r := &Eql{ diff --git a/typedapi/types/eqlfeatures.go b/typedapi/types/eqlfeatures.go old mode 100755 new mode 100644 index 8eb9b1d067..98f6993ade --- a/typedapi/types/eqlfeatures.go +++ b/typedapi/types/eqlfeatures.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EqlFeatures type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L99-L107 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L99-L107 type EqlFeatures struct { Event uint `json:"event"` Join uint `json:"join"` diff --git a/typedapi/types/eqlfeaturesjoin.go b/typedapi/types/eqlfeaturesjoin.go old mode 100755 new mode 100644 index 2194de740b..4f84d78ce4 --- a/typedapi/types/eqlfeaturesjoin.go +++ b/typedapi/types/eqlfeaturesjoin.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EqlFeaturesJoin type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L109-L115 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L109-L115 type EqlFeaturesJoin struct { JoinQueriesFiveOrMore uint `json:"join_queries_five_or_more"` JoinQueriesFour uint `json:"join_queries_four"` diff --git a/typedapi/types/eqlfeatureskeys.go b/typedapi/types/eqlfeatureskeys.go old mode 100755 new mode 100644 index 0ca3adfab2..767a797036 --- a/typedapi/types/eqlfeatureskeys.go +++ b/typedapi/types/eqlfeatureskeys.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EqlFeaturesKeys type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L117-L123 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L117-L123 type EqlFeaturesKeys struct { JoinKeysFiveOrMore uint `json:"join_keys_five_or_more"` JoinKeysFour uint `json:"join_keys_four"` diff --git a/typedapi/types/eqlfeaturespipes.go b/typedapi/types/eqlfeaturespipes.go old mode 100755 new mode 100644 index 21e70a7902..4ebd192d92 --- a/typedapi/types/eqlfeaturespipes.go +++ b/typedapi/types/eqlfeaturespipes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EqlFeaturesPipes type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L125-L128 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L125-L128 type EqlFeaturesPipes struct { PipeHead uint `json:"pipe_head"` PipeTail uint `json:"pipe_tail"` diff --git a/typedapi/types/eqlfeaturessequences.go b/typedapi/types/eqlfeaturessequences.go old mode 100755 new mode 100644 index 485c60d9a3..3a437f383f --- a/typedapi/types/eqlfeaturessequences.go +++ b/typedapi/types/eqlfeaturessequences.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EqlFeaturesSequences type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L130-L137 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L130-L137 type EqlFeaturesSequences struct { SequenceMaxspan uint `json:"sequence_maxspan"` SequenceQueriesFiveOrMore uint `json:"sequence_queries_five_or_more"` diff --git a/typedapi/types/eqlhits.go b/typedapi/types/eqlhits.go old mode 100755 new mode 100644 index 8953adef87..a0c3e3eda3 --- a/typedapi/types/eqlhits.go +++ b/typedapi/types/eqlhits.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // EqlHits type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/_types/EqlHits.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/_types/EqlHits.ts#L25-L39 type EqlHits struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events,omitempty"` diff --git a/typedapi/types/errorcause.go b/typedapi/types/errorcause.go old mode 100755 new mode 100644 index 91f1105948..ecd4b92aa3 --- a/typedapi/types/errorcause.go +++ b/typedapi/types/errorcause.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,7 +27,7 @@ import ( // ErrorCause type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Errors.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Errors.ts#L25-L48 type ErrorCause struct { CausedBy *ErrorCause `json:"caused_by,omitempty"` Metadata map[string]json.RawMessage `json:"-"` @@ -61,6 +61,7 @@ func (s ErrorCause) MarshalJSON() ([]byte, error) { for key, value := range s.Metadata { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Metadata") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/ewmamodelsettings.go b/typedapi/types/ewmamodelsettings.go old mode 100755 new mode 100644 index e15922cb2c..cd52054ab1 --- a/typedapi/types/ewmamodelsettings.go +++ b/typedapi/types/ewmamodelsettings.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // EwmaModelSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L227-L229 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L227-L229 type EwmaModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` } +func (s *EwmaModelSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Alpha = &f + case float64: + f := float32(v) + s.Alpha = &f + } + + } + } + return nil +} + // NewEwmaModelSettings returns a EwmaModelSettings. func NewEwmaModelSettings() *EwmaModelSettings { r := &EwmaModelSettings{} diff --git a/typedapi/types/ewmamovingaverageaggregation.go b/typedapi/types/ewmamovingaverageaggregation.go old mode 100755 new mode 100644 index 477c7eaf3d..eb7b1d07b7 --- a/typedapi/types/ewmamovingaverageaggregation.go +++ b/typedapi/types/ewmamovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,27 +27,30 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // EwmaMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L212-L215 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L212-L215 type EwmaMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Minimize *bool `json:"minimize,omitempty"` - Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` - Predict *int `json:"predict,omitempty"` - Settings EwmaModelSettings `json:"settings"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Minimize *bool `json:"minimize,omitempty"` + Model string `json:"model,omitempty"` + Name *string `json:"name,omitempty"` + Predict *int `json:"predict,omitempty"` + Settings EwmaModelSettings `json:"settings"` + Window *int `json:"window,omitempty"` } func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,9 +70,12 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -82,8 +88,17 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "minimize": - if err := dec.Decode(&s.Minimize); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Minimize = &value + case bool: + s.Minimize = &v } case "model": @@ -92,13 +107,27 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "predict": - if err := dec.Decode(&s.Predict); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Predict = &value + case float64: + f := int(v) + s.Predict = &f } case "settings": @@ -107,8 +136,19 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/executeenrichpolicystatus.go b/typedapi/types/executeenrichpolicystatus.go old mode 100755 new mode 100644 index d849621a2d..5d1e0767b8 --- a/typedapi/types/executeenrichpolicystatus.go +++ b/typedapi/types/executeenrichpolicystatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // ExecuteEnrichPolicyStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/execute_policy/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/execute_policy/types.ts#L20-L22 type ExecuteEnrichPolicyStatus struct { Phase enrichpolicyphase.EnrichPolicyPhase `json:"phase"` } diff --git a/typedapi/types/executingpolicy.go b/typedapi/types/executingpolicy.go old mode 100755 new mode 100644 index b3748c52d0..48f303f36d --- a/typedapi/types/executingpolicy.go +++ b/typedapi/types/executingpolicy.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ExecutingPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/stats/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/stats/types.ts#L24-L27 type ExecutingPolicy struct { Name string `json:"name"` Task TaskInfo `json:"task"` } +func (s *ExecutingPolicy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "task": + if err := dec.Decode(&s.Task); err != nil { + return err + } + + } + } + return nil +} + // NewExecutingPolicy returns a ExecutingPolicy. func NewExecutingPolicy() *ExecutingPolicy { r := &ExecutingPolicy{} diff --git a/typedapi/types/executionresult.go b/typedapi/types/executionresult.go old mode 100755 new mode 100644 index 0bee8d751a..05972816a7 --- a/typedapi/types/executionresult.go +++ b/typedapi/types/executionresult.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ExecutionResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L60-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L60-L66 type ExecutionResult struct { Actions []ExecutionResultAction `json:"actions"` Condition ExecutionResultCondition `json:"condition"` @@ -31,6 +39,51 @@ type ExecutionResult struct { Input ExecutionResultInput `json:"input"` } +func (s *ExecutionResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "condition": + if err := dec.Decode(&s.Condition); err != nil { + return err + } + + case "execution_duration": + if err := dec.Decode(&s.ExecutionDuration); err != nil { + return err + } + + case "execution_time": + if err := dec.Decode(&s.ExecutionTime); err != nil { + return err + } + + case "input": + if err := dec.Decode(&s.Input); err != nil { + return err + } + + } + } + return nil +} + // NewExecutionResult returns a ExecutionResult. func NewExecutionResult() *ExecutionResult { r := &ExecutionResult{} diff --git a/typedapi/types/executionresultaction.go b/typedapi/types/executionresultaction.go old mode 100755 new mode 100644 index ae069f6bd8..23b164ae9e --- a/typedapi/types/executionresultaction.go +++ b/typedapi/types/executionresultaction.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/actionstatusoptions" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/actiontype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ExecutionResultAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L74-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L74-L86 type ExecutionResultAction struct { Email *EmailResult `json:"email,omitempty"` Error *ErrorCause `json:"error,omitempty"` @@ -42,6 +48,84 @@ type ExecutionResultAction struct { Webhook *WebhookResult `json:"webhook,omitempty"` } +func (s *ExecutionResultAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "email": + if err := dec.Decode(&s.Email); err != nil { + return err + } + + case "error": + if err := dec.Decode(&s.Error); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "logging": + if err := dec.Decode(&s.Logging); err != nil { + return err + } + + case "pagerduty": + if err := dec.Decode(&s.Pagerduty); err != nil { + return err + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = &o + + case "slack": + if err := dec.Decode(&s.Slack); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "webhook": + if err := dec.Decode(&s.Webhook); err != nil { + return err + } + + } + } + return nil +} + // NewExecutionResultAction returns a ExecutionResultAction. func NewExecutionResultAction() *ExecutionResultAction { r := &ExecutionResultAction{} diff --git a/typedapi/types/executionresultcondition.go b/typedapi/types/executionresultcondition.go old mode 100755 new mode 100644 index 402d924c93..ab7e9b75d8 --- a/typedapi/types/executionresultcondition.go +++ b/typedapi/types/executionresultcondition.go @@ -16,24 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/actionstatusoptions" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/conditiontype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ExecutionResultCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L68-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L68-L72 type ExecutionResultCondition struct { Met bool `json:"met"` Status actionstatusoptions.ActionStatusOptions `json:"status"` Type conditiontype.ConditionType `json:"type"` } +func (s *ExecutionResultCondition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "met": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Met = value + case bool: + s.Met = v + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewExecutionResultCondition returns a ExecutionResultCondition. func NewExecutionResultCondition() *ExecutionResultCondition { r := &ExecutionResultCondition{} diff --git a/typedapi/types/executionresultinput.go b/typedapi/types/executionresultinput.go old mode 100755 new mode 100644 index e46f758649..bb4d4bf7e7 --- a/typedapi/types/executionresultinput.go +++ b/typedapi/types/executionresultinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,7 +29,7 @@ import ( // ExecutionResultInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L88-L92 type ExecutionResultInput struct { Payload map[string]json.RawMessage `json:"payload"` Status actionstatusoptions.ActionStatusOptions `json:"status"` diff --git a/typedapi/types/executionstate.go b/typedapi/types/executionstate.go old mode 100755 new mode 100644 index c33dc43303..ccce9b1831 --- a/typedapi/types/executionstate.go +++ b/typedapi/types/executionstate.go @@ -16,19 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExecutionState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L117-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L117-L121 type ExecutionState struct { Reason *string `json:"reason,omitempty"` Successful bool `json:"successful"` Timestamp DateTime `json:"timestamp"` } +func (s *ExecutionState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = &o + + case "successful": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Successful = value + case bool: + s.Successful = v + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewExecutionState returns a ExecutionState. func NewExecutionState() *ExecutionState { r := &ExecutionState{} diff --git a/typedapi/types/executionthreadpool.go b/typedapi/types/executionthreadpool.go old mode 100755 new mode 100644 index d483cdebd4..9773542668 --- a/typedapi/types/executionthreadpool.go +++ b/typedapi/types/executionthreadpool.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExecutionThreadPool type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Execution.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Execution.ts#L94-L97 type ExecutionThreadPool struct { MaxSize int64 `json:"max_size"` QueueSize int64 `json:"queue_size"` } +func (s *ExecutionThreadPool) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxSize = value + case float64: + f := int64(v) + s.MaxSize = f + } + + case "queue_size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.QueueSize = value + case float64: + f := int64(v) + s.QueueSize = f + } + + } + } + return nil +} + // NewExecutionThreadPool returns a ExecutionThreadPool. 
func NewExecutionThreadPool() *ExecutionThreadPool { r := &ExecutionThreadPool{} diff --git a/typedapi/types/existsquery.go b/typedapi/types/existsquery.go old mode 100755 new mode 100644 index 019210aad6..7fadd70546 --- a/typedapi/types/existsquery.go +++ b/typedapi/types/existsquery.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExistsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L36-L38 type ExistsQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` QueryName_ *string `json:"_name,omitempty"` } +func (s *ExistsQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewExistsQuery returns a ExistsQuery. func NewExistsQuery() *ExistsQuery { r := &ExistsQuery{} diff --git a/typedapi/types/expandwildcards.go b/typedapi/types/expandwildcards.go old mode 100755 new mode 100644 index fd16ae18b8..be78037244 --- a/typedapi/types/expandwildcards.go +++ b/typedapi/types/expandwildcards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // ExpandWildcards type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L197-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L197-L197 type ExpandWildcards []expandwildcard.ExpandWildcard diff --git a/typedapi/types/explainanalyzetoken.go b/typedapi/types/explainanalyzetoken.go old mode 100755 new mode 100644 index 285bc9a588..04e10e2067 --- a/typedapi/types/explainanalyzetoken.go +++ b/typedapi/types/explainanalyzetoken.go @@ -16,18 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ExplainAnalyzeToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L52-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L52-L64 type ExplainAnalyzeToken struct { Bytes string `json:"bytes"` EndOffset int64 `json:"end_offset"` @@ -41,6 +48,152 @@ type ExplainAnalyzeToken struct { Type string `json:"type"` } +func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Bytes = o + + case "end_offset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EndOffset = value + case float64: + f := int64(v) + s.EndOffset = f + } + + case "keyword": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyword = &value + case bool: + s.Keyword = &v + } + + case "position": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Position = value + case float64: + f := int64(v) + s.Position = f + } + + case "positionLength": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PositionLength = value + case float64: + f := int64(v) + s.PositionLength = f + } + + case "start_offset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.StartOffset = value + case float64: + f := int64(v) + s.StartOffset = f + } + + case "termFrequency": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TermFrequency = value + case float64: + f := int64(v) + s.TermFrequency = f + } + + case "token": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Token = o + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + default: + + if key, ok := t.(string); ok { + if s.ExplainAnalyzeToken == nil { + s.ExplainAnalyzeToken = make(map[string]json.RawMessage, 0) + } + raw := new(json.RawMessage) + if err := dec.Decode(&raw); err != nil { + return err + } + s.ExplainAnalyzeToken[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s ExplainAnalyzeToken) MarshalJSON() ([]byte, error) { type opt ExplainAnalyzeToken @@ -60,6 +213,7 @@ func (s 
ExplainAnalyzeToken) MarshalJSON() ([]byte, error) { for key, value := range s.ExplainAnalyzeToken { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "ExplainAnalyzeToken") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/explanation.go b/typedapi/types/explanation.go old mode 100755 new mode 100644 index 55b2732f11..48cebb6b41 --- a/typedapi/types/explanation.go +++ b/typedapi/types/explanation.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Explanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/explain/types.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/explain/types.ts#L22-L26 type Explanation struct { Description string `json:"description"` Details []ExplanationDetail `json:"details"` Value float32 `json:"value"` } +func (s *Explanation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "details": + if err := dec.Decode(&s.Details); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Value = f + case float64: + f := float32(v) + s.Value = f + } + + } + } + return nil +} + // NewExplanation returns a Explanation. func NewExplanation() *Explanation { r := &Explanation{} diff --git a/typedapi/types/explanationdetail.go b/typedapi/types/explanationdetail.go old mode 100755 new mode 100644 index af4bcca789..64468c060e --- a/typedapi/types/explanationdetail.go +++ b/typedapi/types/explanationdetail.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExplanationDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/explain/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/explain/types.ts#L28-L32 type ExplanationDetail struct { Description string `json:"description"` Details []ExplanationDetail `json:"details,omitempty"` Value float32 `json:"value"` } +func (s *ExplanationDetail) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "details": + if err := dec.Decode(&s.Details); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Value = f + case float64: + f := float32(v) + s.Value = f + } + + } + } + return nil +} + // NewExplanationDetail returns a ExplanationDetail. func NewExplanationDetail() *ExplanationDetail { r := &ExplanationDetail{} diff --git a/typedapi/types/explorecontrols.go b/typedapi/types/explorecontrols.go old mode 100755 new mode 100644 index 68d3ed293d..2b78ff25e1 --- a/typedapi/types/explorecontrols.go +++ b/typedapi/types/explorecontrols.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExploreControls type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/ExploreControls.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/ExploreControls.ts#L24-L29 type ExploreControls struct { SampleDiversity *SampleDiversity `json:"sample_diversity,omitempty"` SampleSize *int `json:"sample_size,omitempty"` @@ -30,6 +40,66 @@ type ExploreControls struct { UseSignificance bool `json:"use_significance"` } +func (s *ExploreControls) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "sample_diversity": + if err := dec.Decode(&s.SampleDiversity); err != nil { + return err + } + + case "sample_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SampleSize = &value + case float64: + f := int(v) + s.SampleSize = &f + } + + case "timeout": + if err := dec.Decode(&s.Timeout); err != nil { + return err + } + + case "use_significance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UseSignificance = value + case bool: + s.UseSignificance = v + } + + } + } + return nil +} + // NewExploreControls returns a ExploreControls. func NewExploreControls() *ExploreControls { r := &ExploreControls{} diff --git a/typedapi/types/extendedboundsdouble.go b/typedapi/types/extendedboundsdouble.go old mode 100755 new mode 100644 index 8402f80fd2..519873a9f7 --- a/typedapi/types/extendedboundsdouble.go +++ b/typedapi/types/extendedboundsdouble.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExtendedBoundsdouble type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L230-L233 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L230-L233 type ExtendedBoundsdouble struct { Max Float64 `json:"max"` Min Float64 `json:"min"` } +func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Max = f + case float64: + f := Float64(v) + s.Max = f + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Min = f + case float64: + f := Float64(v) + s.Min = f + } + + } + } + return nil +} + // NewExtendedBoundsdouble returns a ExtendedBoundsdouble. func NewExtendedBoundsdouble() *ExtendedBoundsdouble { r := &ExtendedBoundsdouble{} diff --git a/typedapi/types/extendedboundsfielddatemath.go b/typedapi/types/extendedboundsfielddatemath.go old mode 100755 new mode 100644 index 3836b9a317..82f10a92db --- a/typedapi/types/extendedboundsfielddatemath.go +++ b/typedapi/types/extendedboundsfielddatemath.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ExtendedBoundsFieldDateMath type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L230-L233 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L230-L233 type ExtendedBoundsFieldDateMath struct { Max FieldDateMath `json:"max"` Min FieldDateMath `json:"min"` } +func (s *ExtendedBoundsFieldDateMath) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max": + if err := dec.Decode(&s.Max); err != nil { + return err + } + + case "min": + if err := dec.Decode(&s.Min); err != nil { + return err + } + + } + } + return nil +} + // NewExtendedBoundsFieldDateMath returns a ExtendedBoundsFieldDateMath. func NewExtendedBoundsFieldDateMath() *ExtendedBoundsFieldDateMath { r := &ExtendedBoundsFieldDateMath{} diff --git a/typedapi/types/extendedmemorystats.go b/typedapi/types/extendedmemorystats.go old mode 100755 new mode 100644 index 752788b5a0..69390b56d1 --- a/typedapi/types/extendedmemorystats.go +++ b/typedapi/types/extendedmemorystats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExtendedMemoryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L261-L264 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L261-L264 type ExtendedMemoryStats struct { AdjustedTotalInBytes *int64 `json:"adjusted_total_in_bytes,omitempty"` FreeInBytes *int64 `json:"free_in_bytes,omitempty"` @@ -38,6 +48,187 @@ type ExtendedMemoryStats struct { UsedPercent *int `json:"used_percent,omitempty"` } +func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "adjusted_total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AdjustedTotalInBytes = &value + case float64: + f := int64(v) + s.AdjustedTotalInBytes = &f + } + + case "free_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeInBytes = &value + case float64: + f := int64(v) + s.FreeInBytes = &f + } + + case "free_percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FreePercent = &value + case float64: + f := int(v) + s.FreePercent = &f + } + + case "resident": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Resident = &o + + case "resident_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ResidentInBytes = &value + case float64: + f := int64(v) + s.ResidentInBytes = &f + } + + case "share": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Share = &o + + case "share_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShareInBytes = &value + case float64: + f := int64(v) + s.ShareInBytes = &f + } + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = &value + case float64: + f := int64(v) + s.TotalInBytes = &f + } + + case "total_virtual": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TotalVirtual = &o + + case "total_virtual_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalVirtualInBytes = &value + case float64: + f := int64(v) + s.TotalVirtualInBytes = &f + } + + case "used_in_bytes": + var 
tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UsedInBytes = &value + case float64: + f := int64(v) + s.UsedInBytes = &f + } + + case "used_percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.UsedPercent = &value + case float64: + f := int(v) + s.UsedPercent = &f + } + + } + } + return nil +} + // NewExtendedMemoryStats returns a ExtendedMemoryStats. func NewExtendedMemoryStats() *ExtendedMemoryStats { r := &ExtendedMemoryStats{} diff --git a/typedapi/types/extendedstatsaggregate.go b/typedapi/types/extendedstatsaggregate.go old mode 100755 new mode 100644 index c8a4469027..eb3489f818 --- a/typedapi/types/extendedstatsaggregate.go +++ b/typedapi/types/extendedstatsaggregate.go @@ -16,24 +16,30 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // ExtendedStatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L277-L295 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L278-L296 type ExtendedStatsAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` Max Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` Min Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` StdDeviation Float64 `json:"std_deviation,omitempty"` @@ -54,6 +60,194 @@ type ExtendedStatsAggregate struct { VarianceSamplingAsString *string `json:"variance_sampling_as_string,omitempty"` } +func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + if err := dec.Decode(&s.Avg); err != nil { + return err + } + + case "avg_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AvgAsString = &o + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "max": + if err := dec.Decode(&s.Max); err != nil { + return err + } + + case "max_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxAsString = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min": + if err := dec.Decode(&s.Min); err != nil { + return err + } + + case "min_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinAsString 
= &o + + case "std_deviation": + if err := dec.Decode(&s.StdDeviation); err != nil { + return err + } + + case "std_deviation_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StdDeviationAsString = &o + + case "std_deviation_bounds": + if err := dec.Decode(&s.StdDeviationBounds); err != nil { + return err + } + + case "std_deviation_bounds_as_string": + if err := dec.Decode(&s.StdDeviationBoundsAsString); err != nil { + return err + } + + case "std_deviation_population": + if err := dec.Decode(&s.StdDeviationPopulation); err != nil { + return err + } + + case "std_deviation_sampling": + if err := dec.Decode(&s.StdDeviationSampling); err != nil { + return err + } + + case "sum": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Sum = f + case float64: + f := Float64(v) + s.Sum = f + } + + case "sum_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SumAsString = &o + + case "sum_of_squares": + if err := dec.Decode(&s.SumOfSquares); err != nil { + return err + } + + case "sum_of_squares_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SumOfSquaresAsString = &o + + case "variance": + if err := dec.Decode(&s.Variance); err != nil { + return err + } + + case "variance_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VarianceAsString = &o + + case "variance_population": + if err := dec.Decode(&s.VariancePopulation); err != nil { + return err + } + + case "variance_population_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VariancePopulationAsString = &o + + case "variance_sampling": + if err := dec.Decode(&s.VarianceSampling); err != nil { + return err + } + + case "variance_sampling_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VarianceSamplingAsString = &o + + } + } + return nil +} + // NewExtendedStatsAggregate returns a ExtendedStatsAggregate. func NewExtendedStatsAggregate() *ExtendedStatsAggregate { r := &ExtendedStatsAggregate{} diff --git a/typedapi/types/extendedstatsaggregation.go b/typedapi/types/extendedstatsaggregation.go old mode 100755 new mode 100644 index 4c25d41080..f98efdb554 --- a/typedapi/types/extendedstatsaggregation.go +++ b/typedapi/types/extendedstatsaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ExtendedStatsAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L68-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L68-L70 type ExtendedStatsAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -31,6 +41,65 @@ type ExtendedStatsAggregation struct { Sigma *Float64 `json:"sigma,omitempty"` } +func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "sigma": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Sigma = &f + case float64: + f := Float64(v) + s.Sigma = &f + } + + } + } + return nil +} + // NewExtendedStatsAggregation returns a ExtendedStatsAggregation. func NewExtendedStatsAggregation() *ExtendedStatsAggregation { r := &ExtendedStatsAggregation{} diff --git a/typedapi/types/extendedstatsbucketaggregate.go b/typedapi/types/extendedstatsbucketaggregate.go old mode 100755 new mode 100644 index 911f26ac00..0f1bf6d334 --- a/typedapi/types/extendedstatsbucketaggregate.go +++ b/typedapi/types/extendedstatsbucketaggregate.go @@ -16,24 +16,30 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // ExtendedStatsBucketAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L297-L298 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L298-L299 type ExtendedStatsBucketAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` Max Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` Min Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` StdDeviation Float64 `json:"std_deviation,omitempty"` @@ -54,6 +60,194 @@ type ExtendedStatsBucketAggregate struct { VarianceSamplingAsString *string `json:"variance_sampling_as_string,omitempty"` } +func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + if err := dec.Decode(&s.Avg); err != nil { + return err + } + + case "avg_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AvgAsString = &o + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "max": + if err := dec.Decode(&s.Max); err != nil { + return err + } + + case "max_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxAsString = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min": + if err := dec.Decode(&s.Min); err != nil { + return err + } + + case "min_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinAsString = &o + + case "std_deviation": + if err := dec.Decode(&s.StdDeviation); err != nil { + return err + } + + case "std_deviation_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StdDeviationAsString = &o + + case "std_deviation_bounds": + if err := dec.Decode(&s.StdDeviationBounds); err != nil { + return err + } + + case "std_deviation_bounds_as_string": + if err := dec.Decode(&s.StdDeviationBoundsAsString); err != nil { + return err + } + + case "std_deviation_population": + if err := dec.Decode(&s.StdDeviationPopulation); err != nil { + return err + } + + case "std_deviation_sampling": + if err := dec.Decode(&s.StdDeviationSampling); err != nil { + return err + } + + case "sum": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Sum = f + case float64: + f := Float64(v) + s.Sum = f + } + + case "sum_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SumAsString = &o + + case "sum_of_squares": + if err := dec.Decode(&s.SumOfSquares); err != nil { + return err + } + + case "sum_of_squares_as_string": + var tmp json.RawMessage + if err := 
dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SumOfSquaresAsString = &o + + case "variance": + if err := dec.Decode(&s.Variance); err != nil { + return err + } + + case "variance_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VarianceAsString = &o + + case "variance_population": + if err := dec.Decode(&s.VariancePopulation); err != nil { + return err + } + + case "variance_population_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VariancePopulationAsString = &o + + case "variance_sampling": + if err := dec.Decode(&s.VarianceSampling); err != nil { + return err + } + + case "variance_sampling_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VarianceSamplingAsString = &o + + } + } + return nil +} + // NewExtendedStatsBucketAggregate returns a ExtendedStatsBucketAggregate. func NewExtendedStatsBucketAggregate() *ExtendedStatsBucketAggregate { r := &ExtendedStatsBucketAggregate{} diff --git a/typedapi/types/extendedstatsbucketaggregation.go b/typedapi/types/extendedstatsbucketaggregation.go old mode 100755 new mode 100644 index 62e512795a..473f47a081 --- a/typedapi/types/extendedstatsbucketaggregation.go +++ b/typedapi/types/extendedstatsbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,23 +27,26 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ExtendedStatsBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L167-L169 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L167-L169 type ExtendedStatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Sigma *Float64 `json:"sigma,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Sigma *Float64 `json:"sigma,omitempty"` } func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,9 +66,12 @@ func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -78,13 +84,27 @@ func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "sigma": - if err := dec.Decode(&s.Sigma); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Sigma = &f + case float64: + f := Float64(v) + s.Sigma = &f } } diff --git a/typedapi/types/failprocessor.go b/typedapi/types/failprocessor.go old mode 100755 new mode 100644 index f0b1019f59..6b99b55270 --- a/typedapi/types/failprocessor.go +++ b/typedapi/types/failprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FailProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L211-L213 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L211-L213 type FailProcessor struct { Description *string `json:"description,omitempty"` If *string `json:"if,omitempty"` @@ -32,6 +42,77 @@ type FailProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *FailProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "message": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Message = o + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewFailProcessor returns a FailProcessor. func NewFailProcessor() *FailProcessor { r := &FailProcessor{} diff --git a/typedapi/types/feature.go b/typedapi/types/feature.go old mode 100755 new mode 100644 index 61317b11e7..3c94c00a6d --- a/typedapi/types/feature.go +++ b/typedapi/types/feature.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Feature type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/features/_types/Feature.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/features/_types/Feature.ts#L20-L23 type Feature struct { Description string `json:"description"` Name string `json:"name"` diff --git a/typedapi/types/features.go b/typedapi/types/features.go old mode 100755 new mode 100644 index 84c68f1feb..ed6d2171da --- a/typedapi/types/features.go +++ b/typedapi/types/features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // Features type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get/IndicesGetRequest.ts#L94-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get/IndicesGetRequest.ts#L94-L94 type Features []feature.Feature diff --git a/typedapi/types/featuretoggle.go b/typedapi/types/featuretoggle.go old mode 100755 new mode 100644 index 7ea12aacc2..3a7e6f2809 --- a/typedapi/types/featuretoggle.go +++ b/typedapi/types/featuretoggle.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FeatureToggle type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L40-L42 type FeatureToggle struct { Enabled bool `json:"enabled"` } +func (s *FeatureToggle) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewFeatureToggle returns a FeatureToggle. func NewFeatureToggle() *FeatureToggle { r := &FeatureToggle{} diff --git a/typedapi/types/fetchprofile.go b/typedapi/types/fetchprofile.go old mode 100755 new mode 100644 index d3ef3251fa..40758b54f1 --- a/typedapi/types/fetchprofile.go +++ b/typedapi/types/fetchprofile.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // FetchProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L139-L146 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L139-L146 type FetchProfile struct { Breakdown FetchProfileBreakdown `json:"breakdown"` Children []FetchProfile `json:"children,omitempty"` @@ -32,6 +40,62 @@ type FetchProfile struct { Type string `json:"type"` } +func (s *FetchProfile) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "breakdown": + if err := dec.Decode(&s.Breakdown); err != nil { + return err + } + + case "children": + if err := dec.Decode(&s.Children); err != nil { + return err + } + + case "debug": + if err := dec.Decode(&s.Debug); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "time_in_nanos": + if err := dec.Decode(&s.TimeInNanos); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewFetchProfile returns a FetchProfile. func NewFetchProfile() *FetchProfile { r := &FetchProfile{} diff --git a/typedapi/types/fetchprofilebreakdown.go b/typedapi/types/fetchprofilebreakdown.go old mode 100755 new mode 100644 index d25e087a65..dbeb23fa50 --- a/typedapi/types/fetchprofilebreakdown.go +++ b/typedapi/types/fetchprofilebreakdown.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FetchProfileBreakdown type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L148-L157 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L148-L157 type FetchProfileBreakdown struct { LoadSource *int `json:"load_source,omitempty"` LoadSourceCount *int `json:"load_source_count,omitempty"` @@ -34,6 +44,154 @@ type FetchProfileBreakdown struct { ProcessCount *int `json:"process_count,omitempty"` } +func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "load_source": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LoadSource = &value + case float64: + f := int(v) + s.LoadSource = &f + } + + case "load_source_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LoadSourceCount = &value + case float64: + f := int(v) + s.LoadSourceCount = &f + } + + case "load_stored_fields": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LoadStoredFields = &value + case float64: + f := int(v) + s.LoadStoredFields = &f + } + + case "load_stored_fields_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LoadStoredFieldsCount = &value + case float64: + f := int(v) + s.LoadStoredFieldsCount = &f + } + + case "next_reader": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NextReader = &value + case float64: + f := int(v) + s.NextReader = &f + } + + case "next_reader_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NextReaderCount = &value + case float64: + f := int(v) + s.NextReaderCount = &f + } + + case "process": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Process = &value + case float64: + f := int(v) + s.Process = &f + } + + case "process_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ProcessCount = &value + case float64: + f := int(v) + s.ProcessCount = &f + } + + } + } + return nil +} + // NewFetchProfileBreakdown returns a FetchProfileBreakdown. func NewFetchProfileBreakdown() *FetchProfileBreakdown { r := &FetchProfileBreakdown{} diff --git a/typedapi/types/fetchprofiledebug.go b/typedapi/types/fetchprofiledebug.go old mode 100755 new mode 100644 index 210f96ce0c..a0db7efdf6 --- a/typedapi/types/fetchprofiledebug.go +++ b/typedapi/types/fetchprofiledebug.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
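The FetchProfileBreakdown decoder above accepts every breakdown counter either as a JSON number or as a string-encoded integer and stores the result in the matching *int field. A minimal usage sketch (the types import path is inferred from the typedapi/types file paths in this diff):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "load_source" arrives as a quoted integer, "process_count" as a number;
	// the generated decoder coerces both into *int fields.
	raw := []byte(`{"load_source":"3","process_count":7}`)

	var b types.FetchProfileBreakdown
	if err := json.Unmarshal(raw, &b); err != nil {
		panic(err)
	}
	fmt.Println(*b.LoadSource, *b.ProcessCount) // 3 7
}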
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FetchProfileDebug type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L159-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L159-L162 type FetchProfileDebug struct { FastPath *int `json:"fast_path,omitempty"` StoredFields []string `json:"stored_fields,omitempty"` } +func (s *FetchProfileDebug) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fast_path": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FastPath = &value + case float64: + f := int(v) + s.FastPath = &f + } + + case "stored_fields": + if err := dec.Decode(&s.StoredFields); err != nil { + return err + } + + } + } + return nil +} + // NewFetchProfileDebug returns a FetchProfileDebug. func NewFetchProfileDebug() *FetchProfileDebug { r := &FetchProfileDebug{} diff --git a/typedapi/types/fieldaliasproperty.go b/typedapi/types/fieldaliasproperty.go old mode 100755 new mode 100644 index 6b9d13bb77..caa440b43a --- a/typedapi/types/fieldaliasproperty.go +++ b/typedapi/types/fieldaliasproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // FieldAliasProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L49-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L49-L52 type FieldAliasProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -45,6 +47,7 @@ type FieldAliasProperty struct { } func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,6 +67,9 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -351,18 +357,32 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -373,6 +393,9 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -660,7 +683,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/fieldandformat.go b/typedapi/types/fieldandformat.go old mode 100755 new mode 100644 index 1cb1c843ae..dff05de875 --- a/typedapi/types/fieldandformat.go +++ b/typedapi/types/fieldandformat.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldAndFormat type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L212-L226 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L212-L226 type FieldAndFormat struct { // Field Wildcard pattern. The request returns values for field names matching this // pattern. 
@@ -32,6 +42,58 @@ type FieldAndFormat struct { IncludeUnmapped *bool `json:"include_unmapped,omitempty"` } +func (s *FieldAndFormat) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Field) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "include_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeUnmapped = &value + case bool: + s.IncludeUnmapped = &v + } + + } + } + return nil +} + // NewFieldAndFormat returns a FieldAndFormat. func NewFieldAndFormat() *FieldAndFormat { r := &FieldAndFormat{} diff --git a/typedapi/types/fieldcapability.go b/typedapi/types/fieldcapability.go old mode 100755 new mode 100644 index 112d33d56b..65e6bae67b --- a/typedapi/types/fieldcapability.go +++ b/typedapi/types/fieldcapability.go @@ -16,19 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/timeseriesmetrictype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // FieldCapability type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/field_caps/types.ts#L23-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/field_caps/types.ts#L23-L81 type FieldCapability struct { // Aggregatable Whether this field can be aggregated on all indices. Aggregatable bool `json:"aggregatable"` @@ -39,7 +45,7 @@ type FieldCapability struct { // values. A value length of 1 indicates that all indices had the same value for // this key, while a length of 2 or more indicates that not all indices had the // same value for this key. - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // MetadataField Whether this field is registered as a metadata field. 
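FieldAndFormat's decoder above short-circuits when the payload is not a JSON object and reads the whole value into Field, so a bare field name and the full object form land in the same struct. A small sketch, assuming Field is the plain string suggested by the struct definition:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var abbrev, obj types.FieldAndFormat

	// Bare string: the decoder sees no leading '{' and fills Field directly.
	if err := json.Unmarshal([]byte(`"@timestamp"`), &abbrev); err != nil {
		panic(err)
	}

	// Object form: field and include_unmapped are decoded key by key,
	// with the quoted boolean coerced through strconv.ParseBool.
	if err := json.Unmarshal([]byte(`{"field":"@timestamp","include_unmapped":"true"}`), &obj); err != nil {
		panic(err)
	}

	fmt.Println(abbrev.Field, obj.Field, *obj.IncludeUnmapped)
}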
MetadataField *bool `json:"metadata_field,omitempty"` // MetricConflictsIndices The list of indices where this field is present if these indices @@ -64,6 +70,158 @@ type FieldCapability struct { Type string `json:"type"` } +func (s *FieldCapability) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregatable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Aggregatable = value + case bool: + s.Aggregatable = v + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "metadata_field": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MetadataField = &value + case bool: + s.MetadataField = &v + } + + case "metric_conflicts_indices": + if err := dec.Decode(&s.MetricConflictsIndices); err != nil { + return err + } + + case "non_aggregatable_indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.NonAggregatableIndices = append(s.NonAggregatableIndices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.NonAggregatableIndices); err != nil { + return err + } + } + + case "non_dimension_indices": + if err := dec.Decode(&s.NonDimensionIndices); err != nil { + return err + } + + case "non_searchable_indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.NonSearchableIndices = append(s.NonSearchableIndices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.NonSearchableIndices); err != nil { + return err + } + } + + case "searchable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Searchable = value + case bool: + s.Searchable = v + } + + case "time_series_dimension": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v + } + + case "time_series_metric": + if err := dec.Decode(&s.TimeSeriesMetric); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewFieldCapability returns a FieldCapability. 
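The FieldCapability decoder above normalises two response quirks: boolean flags may arrive as strings, and the various index lists may arrive as a single string rather than an array. A brief sketch of the effect:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"aggregatable":"true","searchable":true,"indices":"logs-2024"}`)

	var fc types.FieldCapability
	if err := json.Unmarshal(raw, &fc); err != nil {
		panic(err)
	}

	// "true" (string) and true (bool) both end up as Go booleans,
	// and the single index name is wrapped into a one-element slice.
	fmt.Println(fc.Aggregatable, fc.Searchable, len(fc.Indices)) // true true 1
}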
func NewFieldCapability() *FieldCapability { r := &FieldCapability{} diff --git a/typedapi/types/fieldcollapse.go b/typedapi/types/fieldcollapse.go old mode 100755 new mode 100644 index cef677850a..33d9f4177c --- a/typedapi/types/fieldcollapse.go +++ b/typedapi/types/fieldcollapse.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldCollapse type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/FieldCollapse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/FieldCollapse.ts#L24-L29 type FieldCollapse struct { Collapse *FieldCollapse `json:"collapse,omitempty"` Field string `json:"field"` @@ -30,6 +40,68 @@ type FieldCollapse struct { MaxConcurrentGroupSearches *int `json:"max_concurrent_group_searches,omitempty"` } +func (s *FieldCollapse) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collapse": + if err := dec.Decode(&s.Collapse); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "inner_hits": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewInnerHits() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.InnerHits = append(s.InnerHits, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.InnerHits); err != nil { + return err + } + } + + case "max_concurrent_group_searches": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxConcurrentGroupSearches = &value + case float64: + f := int(v) + s.MaxConcurrentGroupSearches = &f + } + + } + } + return nil +} + // NewFieldCollapse returns a FieldCollapse. func NewFieldCollapse() *FieldCollapse { r := &FieldCollapse{} diff --git a/typedapi/types/fielddatafrequencyfilter.go b/typedapi/types/fielddatafrequencyfilter.go old mode 100755 new mode 100644 index 610c3c7251..c99dfb4bdf --- a/typedapi/types/fielddatafrequencyfilter.go +++ b/typedapi/types/fielddatafrequencyfilter.go @@ -16,19 +16,97 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FielddataFrequencyFilter type. 
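FieldCollapse above applies the same leniency to inner_hits (a single object or an array) and to max_concurrent_group_searches (number or numeric string). For example:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"field":"user.id","inner_hits":{},"max_concurrent_group_searches":"4"}`)

	var c types.FieldCollapse
	if err := json.Unmarshal(raw, &c); err != nil {
		panic(err)
	}

	// The single inner_hits object is appended as a one-element slice,
	// and the quoted count is parsed into the *int field.
	fmt.Println(c.Field, len(c.InnerHits), *c.MaxConcurrentGroupSearches) // user.id 1 4
}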
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/FielddataFrequencyFilter.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/FielddataFrequencyFilter.ts#L22-L26 type FielddataFrequencyFilter struct { Max Float64 `json:"max"` Min Float64 `json:"min"` MinSegmentSize int `json:"min_segment_size"` } +func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Max = f + case float64: + f := Float64(v) + s.Max = f + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Min = f + case float64: + f := Float64(v) + s.Min = f + } + + case "min_segment_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinSegmentSize = value + case float64: + f := int(v) + s.MinSegmentSize = f + } + + } + } + return nil +} + // NewFielddataFrequencyFilter returns a FielddataFrequencyFilter. func NewFielddataFrequencyFilter() *FielddataFrequencyFilter { r := &FielddataFrequencyFilter{} diff --git a/typedapi/types/fielddatarecord.go b/typedapi/types/fielddatarecord.go old mode 100755 new mode 100644 index dff2e2137f..ec9f73c07a --- a/typedapi/types/fielddatarecord.go +++ b/typedapi/types/fielddatarecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // FielddataRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/fielddata/types.ts#L20-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/fielddata/types.ts#L20-L48 type FielddataRecord struct { // Field field name Field *string `json:"field,omitempty"` diff --git a/typedapi/types/fielddatastats.go b/typedapi/types/fielddatastats.go old mode 100755 new mode 100644 index 0d586c59fa..48ee13dd85 --- a/typedapi/types/fielddatastats.go +++ b/typedapi/types/fielddatastats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FielddataStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L69-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L69-L74 type FielddataStats struct { Evictions *int64 `json:"evictions,omitempty"` Fields map[string]FieldMemoryUsage `json:"fields,omitempty"` @@ -30,6 +40,69 @@ type FielddataStats struct { MemorySizeInBytes int64 `json:"memory_size_in_bytes"` } +func (s *FielddataStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Evictions = &value + case float64: + f := int64(v) + s.Evictions = &f + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]FieldMemoryUsage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "memory_size": + if err := dec.Decode(&s.MemorySize); err != nil { + return err + } + + case "memory_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemorySizeInBytes = value + case float64: + f := int64(v) + s.MemorySizeInBytes = f + } + + } + } + return nil +} + // NewFielddataStats returns a FielddataStats. func NewFielddataStats() *FielddataStats { r := &FielddataStats{ diff --git a/typedapi/types/fielddatemath.go b/typedapi/types/fielddatemath.go old mode 100755 new mode 100644 index 1090c156cd..103fc9e843 --- a/typedapi/types/fielddatemath.go +++ b/typedapi/types/fielddatemath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // Float64 // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L140-L147 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L140-L147 type FieldDateMath interface{} diff --git a/typedapi/types/fieldlookup.go b/typedapi/types/fieldlookup.go old mode 100755 new mode 100644 index e241c51a57..dc9408274e --- a/typedapi/types/fieldlookup.go +++ b/typedapi/types/fieldlookup.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // FieldLookup type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L164-L169 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L164-L169 type FieldLookup struct { Id string `json:"id"` Index *string `json:"index,omitempty"` @@ -30,6 +38,46 @@ type FieldLookup struct { Routing *string `json:"routing,omitempty"` } +func (s *FieldLookup) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "path": + if err := dec.Decode(&s.Path); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + } + } + return nil +} + // NewFieldLookup returns a FieldLookup. func NewFieldLookup() *FieldLookup { r := &FieldLookup{} diff --git a/typedapi/types/fieldmapping.go b/typedapi/types/fieldmapping.go old mode 100755 new mode 100644 index acefee731d..24c85622e2 --- a/typedapi/types/fieldmapping.go +++ b/typedapi/types/fieldmapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // FieldMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L24-L27 type FieldMapping struct { FullName string `json:"full_name"` Mapping map[string]Property `json:"mapping"` } func (s *FieldMapping) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -51,11 +52,17 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { switch t { case "full_name": - if err := dec.Decode(&s.FullName); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.FullName = o case "mapping": + if s.Mapping == nil { + s.Mapping = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -343,7 +350,7 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { } s.Mapping[key] = oo default: - if err := dec.Decode(&s.Mapping); err != nil { + if err := localDec.Decode(&s.Mapping); err != nil { return err } } diff --git a/typedapi/types/fieldmemoryusage.go b/typedapi/types/fieldmemoryusage.go old mode 100755 new mode 100644 index 6114ffa343..52a59a4f55 --- a/typedapi/types/fieldmemoryusage.go +++ b/typedapi/types/fieldmemoryusage.go @@ -16,18 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldMemoryUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L76-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L76-L79 type FieldMemoryUsage struct { MemorySize ByteSize `json:"memory_size,omitempty"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` } +func (s *FieldMemoryUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "memory_size": + if err := dec.Decode(&s.MemorySize); err != nil { + return err + } + + case "memory_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemorySizeInBytes = value + case float64: + f := int64(v) + s.MemorySizeInBytes = f + } + + } + } + return nil +} + // NewFieldMemoryUsage returns a FieldMemoryUsage. func NewFieldMemoryUsage() *FieldMemoryUsage { r := &FieldMemoryUsage{} diff --git a/typedapi/types/fieldmetric.go b/typedapi/types/fieldmetric.go old mode 100755 new mode 100644 index d71d39869f..9d2281b661 --- a/typedapi/types/fieldmetric.go +++ b/typedapi/types/fieldmetric.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/metric" + + "bytes" + "errors" + "io" + + "encoding/json" ) // FieldMetric type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/_types/Metric.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/_types/Metric.ts#L30-L35 type FieldMetric struct { // Field The field to collect metrics for. This must be a numeric of some kind. Field string `json:"field"` @@ -35,6 +41,36 @@ type FieldMetric struct { Metrics []metric.Metric `json:"metrics"` } +func (s *FieldMetric) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "metrics": + if err := dec.Decode(&s.Metrics); err != nil { + return err + } + + } + } + return nil +} + // NewFieldMetric returns a FieldMetric. func NewFieldMetric() *FieldMetric { r := &FieldMetric{} diff --git a/typedapi/types/fieldnamesfield.go b/typedapi/types/fieldnamesfield.go old mode 100755 new mode 100644 index 1062155418..4bba6ae6ce --- a/typedapi/types/fieldnamesfield.go +++ b/typedapi/types/fieldnamesfield.go @@ -16,17 +16,61 @@ // under the License. 
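FieldMemoryUsage's decoder above coerces memory_size_in_bytes from either a JSON number or a quoted integer into the int64 field. A minimal sketch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var u types.FieldMemoryUsage

	// A quoted long is parsed with strconv.ParseInt; a plain number would
	// take the float64 branch of the same switch.
	if err := json.Unmarshal([]byte(`{"memory_size_in_bytes":"24576"}`), &u); err != nil {
		panic(err)
	}
	fmt.Println(u.MemorySizeInBytes) // 24576
}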
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldNamesField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L42-L44 type FieldNamesField struct { Enabled bool `json:"enabled"` } +func (s *FieldNamesField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewFieldNamesField returns a FieldNamesField. func NewFieldNamesField() *FieldNamesField { r := &FieldNamesField{} diff --git a/typedapi/types/fieldrule.go b/typedapi/types/fieldrule.go old mode 100755 new mode 100644 index 52d22ea628..b5576b184e --- a/typedapi/types/fieldrule.go +++ b/typedapi/types/fieldrule.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // FieldRule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RoleMappingRule.ts#L33-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RoleMappingRule.ts#L33-L42 type FieldRule struct { Dn []string `json:"dn,omitempty"` Groups []string `json:"groups,omitempty"` @@ -35,6 +39,73 @@ type FieldRule struct { Username *string `json:"username,omitempty"` } +func (s *FieldRule) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "dn": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Dn = append(s.Dn, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Dn); err != nil { + return err + } + } + + case "groups": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Groups = append(s.Groups, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Groups); err != nil { + return err + } + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "realm": + if err := dec.Decode(&s.Realm); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil +} + // NewFieldRule returns a FieldRule. func NewFieldRule() *FieldRule { r := &FieldRule{} diff --git a/typedapi/types/fields.go b/typedapi/types/fields.go old mode 100755 new mode 100644 index dfe56b686f..1f79087496 --- a/typedapi/types/fields.go +++ b/typedapi/types/fields.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Fields type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L120-L120 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L120-L120 type Fields []string diff --git a/typedapi/types/fieldsecurity.go b/typedapi/types/fieldsecurity.go old mode 100755 new mode 100644 index a2352b220b..fcdba329e2 --- a/typedapi/types/fieldsecurity.go +++ b/typedapi/types/fieldsecurity.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // FieldSecurity type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/FieldSecurity.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/FieldSecurity.ts#L22-L25 type FieldSecurity struct { Except []string `json:"except,omitempty"` Grant []string `json:"grant,omitempty"` } +func (s *FieldSecurity) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "except": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Except = append(s.Except, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Except); err != nil { + return err + } + } + + case "grant": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Grant = append(s.Grant, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Grant); err != nil { + return err + } + } + + } + } + return nil +} + // NewFieldSecurity returns a FieldSecurity. func NewFieldSecurity() *FieldSecurity { r := &FieldSecurity{} diff --git a/typedapi/types/fieldsizeusage.go b/typedapi/types/fieldsizeusage.go old mode 100755 new mode 100644 index c30a35e8d4..5e05141b42 --- a/typedapi/types/fieldsizeusage.go +++ b/typedapi/types/fieldsizeusage.go @@ -16,18 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldSizeUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L59-L62 type FieldSizeUsage struct { Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` } +func (s *FieldSizeUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + } + } + return nil +} + // NewFieldSizeUsage returns a FieldSizeUsage. 
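FieldSecurity above gives grant and except the single-value-or-array treatment, so a document that stores grant as one string still unmarshals into a slice. Sketch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"grant":"message","except":["secret.*","internal.*"]}`)

	var fs types.FieldSecurity
	if err := json.Unmarshal(raw, &fs); err != nil {
		panic(err)
	}

	// Grant: one-element slice built from the bare string;
	// Except: decoded as a normal JSON array.
	fmt.Println(fs.Grant, fs.Except) // [message] [secret.* internal.*]
}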
func NewFieldSizeUsage() *FieldSizeUsage { r := &FieldSizeUsage{} diff --git a/typedapi/types/fieldsort.go b/typedapi/types/fieldsort.go old mode 100755 new mode 100644 index da7f975b1d..d35cc2a1a8 --- a/typedapi/types/fieldsort.go +++ b/typedapi/types/fieldsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,11 +25,17 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/fieldtype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortorder" + + "bytes" + "errors" + "io" + + "encoding/json" ) // FieldSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L44-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L44-L53 type FieldSort struct { Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` @@ -40,6 +46,69 @@ type FieldSort struct { UnmappedType *fieldtype.FieldType `json:"unmapped_type,omitempty"` } +func (s *FieldSort) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Order) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "nested": + if err := dec.Decode(&s.Nested); err != nil { + return err + } + + case "numeric_type": + if err := dec.Decode(&s.NumericType); err != nil { + return err + } + + case "order": + if err := dec.Decode(&s.Order); err != nil { + return err + } + + case "unmapped_type": + if err := dec.Decode(&s.UnmappedType); err != nil { + return err + } + + } + } + return nil +} + // NewFieldSort returns a FieldSort. func NewFieldSort() *FieldSort { r := &FieldSort{} diff --git a/typedapi/types/fieldstatistics.go b/typedapi/types/fieldstatistics.go old mode 100755 new mode 100644 index af626b3b36..d4c3dc5ace --- a/typedapi/types/fieldstatistics.go +++ b/typedapi/types/fieldstatistics.go @@ -16,19 +16,95 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/types.ts#L28-L32 type FieldStatistics struct { DocCount int `json:"doc_count"` SumDocFreq int64 `json:"sum_doc_freq"` SumTtf int64 `json:"sum_ttf"` } +func (s *FieldStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int(v) + s.DocCount = f + } + + case "sum_doc_freq": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumDocFreq = value + case float64: + f := int64(v) + s.SumDocFreq = f + } + + case "sum_ttf": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumTtf = value + case float64: + f := int64(v) + s.SumTtf = f + } + + } + } + return nil +} + // NewFieldStatistics returns a FieldStatistics. func NewFieldStatistics() *FieldStatistics { r := &FieldStatistics{} diff --git a/typedapi/types/fieldsuggester.go b/typedapi/types/fieldsuggester.go old mode 100755 new mode 100644 index 11d766f644..0b7a3dede7 --- a/typedapi/types/fieldsuggester.go +++ b/typedapi/types/fieldsuggester.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // FieldSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L106-L120 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L106-L120 type FieldSuggester struct { Completion *CompletionSuggester `json:"completion,omitempty"` Phrase *PhraseSuggester `json:"phrase,omitempty"` diff --git a/typedapi/types/fieldsummary.go b/typedapi/types/fieldsummary.go old mode 100755 new mode 100644 index ea85405755..bf44a7f9c5 --- a/typedapi/types/fieldsummary.go +++ b/typedapi/types/fieldsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // FieldSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L54-L63 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L54-L63 type FieldSummary struct { Any uint `json:"any"` DocValues uint `json:"doc_values"` diff --git a/typedapi/types/fieldsusagebody.go b/typedapi/types/fieldsusagebody.go old mode 100755 new mode 100644 index b50298d54f..779ba343aa --- a/typedapi/types/fieldsusagebody.go +++ b/typedapi/types/fieldsusagebody.go @@ -16,23 +16,66 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // FieldsUsageBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L32-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L32-L36 type FieldsUsageBody struct { FieldsUsageBody map[string]UsageStatsIndex `json:"-"` Shards_ ShardStatistics `json:"_shards"` } +func (s *FieldsUsageBody) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_shards": + if err := dec.Decode(&s.Shards_); err != nil { + return err + } + + default: + + if key, ok := t.(string); ok { + if s.FieldsUsageBody == nil { + s.FieldsUsageBody = make(map[string]UsageStatsIndex, 0) + } + raw := NewUsageStatsIndex() + if err := dec.Decode(&raw); err != nil { + return err + } + s.FieldsUsageBody[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s FieldsUsageBody) MarshalJSON() ([]byte, error) { type opt FieldsUsageBody @@ -52,6 +95,7 @@ func (s FieldsUsageBody) MarshalJSON() ([]byte, error) { for key, value := range s.FieldsUsageBody { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "FieldsUsageBody") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/fieldtypes.go b/typedapi/types/fieldtypes.go old mode 100755 new mode 100644 index 61e85d07bf..0abb2d0f44 --- a/typedapi/types/fieldtypes.go +++ b/typedapi/types/fieldtypes.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldTypes type. 
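FieldsUsageBody above is one of the additional-properties types: _shards is decoded into Shards_, and every other top-level key becomes an entry in the FieldsUsageBody map (its MarshalJSON flattens the map back out). A sketch of decoding, assuming ShardStatistics and UsageStatsIndex accept the minimal objects shown here:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"_shards":{"total":1,"successful":1,"failed":0},"my-index":{}}`)

	var body types.FieldsUsageBody
	if err := json.Unmarshal(raw, &body); err != nil {
		panic(err)
	}

	// "_shards" went to Shards_; the remaining top-level key became a map entry.
	fmt.Println(len(body.FieldsUsageBody)) // 1
}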
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L105-L114 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L105-L114 type FieldTypes struct { Count int `json:"count"` IndexCount int `json:"index_count"` @@ -33,6 +43,124 @@ type FieldTypes struct { ScriptCount *int `json:"script_count,omitempty"` } +func (s *FieldTypes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "index_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IndexCount = value + case float64: + f := int(v) + s.IndexCount = f + } + + case "indexed_vector_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexedVectorCount = &value + case float64: + f := int64(v) + s.IndexedVectorCount = &f + } + + case "indexed_vector_dim_max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexedVectorDimMax = &value + case float64: + f := int64(v) + s.IndexedVectorDimMax = &f + } + + case "indexed_vector_dim_min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexedVectorDimMin = &value + case float64: + f := int64(v) + s.IndexedVectorDimMin = &f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "script_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ScriptCount = &value + case float64: + f := int(v) + s.ScriptCount = &f + } + + } + } + return nil +} + // NewFieldTypes returns a FieldTypes. func NewFieldTypes() *FieldTypes { r := &FieldTypes{} diff --git a/typedapi/types/fieldtypesmappings.go b/typedapi/types/fieldtypesmappings.go old mode 100755 new mode 100644 index 76932f6f3e..ed5798d9c2 --- a/typedapi/types/fieldtypesmappings.go +++ b/typedapi/types/fieldtypesmappings.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FieldTypesMappings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L96-L103 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L96-L103 type FieldTypesMappings struct { FieldTypes []FieldTypes `json:"field_types"` RuntimeFieldTypes []ClusterRuntimeFieldTypes `json:"runtime_field_types,omitempty"` @@ -32,6 +42,88 @@ type FieldTypesMappings struct { TotalFieldCount *int `json:"total_field_count,omitempty"` } +func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field_types": + if err := dec.Decode(&s.FieldTypes); err != nil { + return err + } + + case "runtime_field_types": + if err := dec.Decode(&s.RuntimeFieldTypes); err != nil { + return err + } + + case "total_deduplicated_field_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalDeduplicatedFieldCount = &value + case float64: + f := int(v) + s.TotalDeduplicatedFieldCount = &f + } + + case "total_deduplicated_mapping_size": + if err := dec.Decode(&s.TotalDeduplicatedMappingSize); err != nil { + return err + } + + case "total_deduplicated_mapping_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalDeduplicatedMappingSizeInBytes = &value + case float64: + f := int64(v) + s.TotalDeduplicatedMappingSizeInBytes = &f + } + + case "total_field_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalFieldCount = &value + case float64: + f := int(v) + s.TotalFieldCount = &f + } + + } + } + return nil +} + // NewFieldTypesMappings returns a FieldTypesMappings. func NewFieldTypesMappings() *FieldTypesMappings { r := &FieldTypesMappings{} diff --git a/typedapi/types/fieldvalue.go b/typedapi/types/fieldvalue.go old mode 100755 new mode 100644 index 0a8dd1a4f0..e91e8d8c34 --- a/typedapi/types/fieldvalue.go +++ b/typedapi/types/fieldvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,5 +29,5 @@ package types // nil // json.RawMessage // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L25-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L25-L37 type FieldValue interface{} diff --git a/typedapi/types/fieldvaluefactorscorefunction.go b/typedapi/types/fieldvaluefactorscorefunction.go old mode 100755 new mode 100644 index 22dc4fe942..706772ebac --- a/typedapi/types/fieldvaluefactorscorefunction.go +++ b/typedapi/types/fieldvaluefactorscorefunction.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/fieldvaluefactormodifier" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // FieldValueFactorScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L70-L75 type FieldValueFactorScoreFunction struct { Factor *Float64 `json:"factor,omitempty"` Field string `json:"field"` @@ -34,6 +42,68 @@ type FieldValueFactorScoreFunction struct { Modifier *fieldvaluefactormodifier.FieldValueFactorModifier `json:"modifier,omitempty"` } +func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "factor": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Factor = &f + case float64: + f := Float64(v) + s.Factor = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Missing = &f + case float64: + f := Float64(v) + s.Missing = &f + } + + case "modifier": + if err := dec.Decode(&s.Modifier); err != nil { + return err + } + + } + } + return nil +} + // NewFieldValueFactorScoreFunction returns a FieldValueFactorScoreFunction. func NewFieldValueFactorScoreFunction() *FieldValueFactorScoreFunction { r := &FieldValueFactorScoreFunction{} diff --git a/typedapi/types/filecountsnapshotstats.go b/typedapi/types/filecountsnapshotstats.go old mode 100755 new mode 100644 index af8e033815..a4b32d630c --- a/typedapi/types/filecountsnapshotstats.go +++ b/typedapi/types/filecountsnapshotstats.go @@ -16,18 +16,79 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FileCountSnapshotStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/FileCountSnapshotStats.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/FileCountSnapshotStats.ts#L22-L25 type FileCountSnapshotStats struct { FileCount int `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` } +func (s *FileCountSnapshotStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "file_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FileCount = value + case float64: + f := int(v) + s.FileCount = f + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + } + } + return nil +} + // NewFileCountSnapshotStats returns a FileCountSnapshotStats. func NewFileCountSnapshotStats() *FileCountSnapshotStats { r := &FileCountSnapshotStats{} diff --git a/typedapi/types/filedetails.go b/typedapi/types/filedetails.go old mode 100755 new mode 100644 index 4d7a48e0ac..19da7a0b66 --- a/typedapi/types/filedetails.go +++ b/typedapi/types/filedetails.go @@ -16,19 +16,87 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FileDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L50-L54 type FileDetails struct { Length int64 `json:"length"` Name string `json:"name"` Recovered int64 `json:"recovered"` } +func (s *FileDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "length": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Length = value + case float64: + f := int64(v) + s.Length = f + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = o + + case "recovered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Recovered = value + case float64: + f := int64(v) + s.Recovered = f + } + + } + } + return nil +} + // NewFileDetails returns a FileDetails. 
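// Editorial usage sketch: because these types now implement
// json.Unmarshaler, a plain json.Unmarshal call picks up the lenient
// decoding. The string-encoded "file_count" below is an assumption made
// for illustration; the import path is the one used throughout this diff.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var stats types.FileCountSnapshotStats
	payload := []byte(`{"file_count": "7", "size_in_bytes": 2048}`)
	if err := json.Unmarshal(payload, &stats); err != nil {
		panic(err)
	}
	// Both fields decode to their Go types regardless of the JSON encoding.
	fmt.Println(stats.FileCount, stats.SizeInBytes) // 7 2048
}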
func NewFileDetails() *FileDetails { r := &FileDetails{} diff --git a/typedapi/types/filesystem.go b/typedapi/types/filesystem.go old mode 100755 new mode 100644 index f2e6c8b6c7..69c3c99924 --- a/typedapi/types/filesystem.go +++ b/typedapi/types/filesystem.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FileSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L286-L291 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L286-L291 type FileSystem struct { Data []DataPathStats `json:"data,omitempty"` IoStats *IoStats `json:"io_stats,omitempty"` @@ -30,6 +40,56 @@ type FileSystem struct { Total *FileSystemTotal `json:"total,omitempty"` } +func (s *FileSystem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "data": + if err := dec.Decode(&s.Data); err != nil { + return err + } + + case "io_stats": + if err := dec.Decode(&s.IoStats); err != nil { + return err + } + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = &value + case float64: + f := int64(v) + s.Timestamp = &f + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + } + } + return nil +} + // NewFileSystem returns a FileSystem. func NewFileSystem() *FileSystem { r := &FileSystem{} diff --git a/typedapi/types/filesystemtotal.go b/typedapi/types/filesystemtotal.go old mode 100755 new mode 100644 index 541e311f8e..2918bfe34c --- a/typedapi/types/filesystemtotal.go +++ b/typedapi/types/filesystemtotal.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FileSystemTotal type. 
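// Editorial sketch of how optional numeric fields (the *int64 timestamp
// and *_in_bytes values above) end up as pointers: the decoded value is
// stored in a local and its address taken, so omitted fields stay nil.
// The helper name is hypothetical.
package main

import (
	"fmt"
	"strconv"
)

func toInt64Ptr(tmp interface{}) (*int64, error) {
	switch v := tmp.(type) {
	case string:
		value, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return nil, err
		}
		return &value, nil
	case float64:
		f := int64(v)
		return &f, nil
	}
	// Field absent or of an unexpected type: leave the pointer nil.
	return nil, nil
}

func main() {
	p, _ := toInt64Ptr(float64(1048576))
	fmt.Println(*p) // 1048576
	q, _ := toInt64Ptr(nil)
	fmt.Println(q == nil) // true
}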
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L307-L314 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L307-L314 type FileSystemTotal struct { Available *string `json:"available,omitempty"` AvailableInBytes *int64 `json:"available_in_bytes,omitempty"` @@ -32,6 +42,95 @@ type FileSystemTotal struct { TotalInBytes *int64 `json:"total_in_bytes,omitempty"` } +func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Available = &o + + case "available_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AvailableInBytes = &value + case float64: + f := int64(v) + s.AvailableInBytes = &f + } + + case "free": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Free = &o + + case "free_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeInBytes = &value + case float64: + f := int64(v) + s.FreeInBytes = &f + } + + case "total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Total = &o + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = &value + case float64: + f := int64(v) + s.TotalInBytes = &f + } + + } + } + return nil +} + // NewFileSystemTotal returns a FileSystemTotal. func NewFileSystemTotal() *FileSystemTotal { r := &FileSystemTotal{} diff --git a/typedapi/types/fillmaskinferenceoptions.go b/typedapi/types/fillmaskinferenceoptions.go old mode 100755 new mode 100644 index 5c2111d01c..c4610bc3c4 --- a/typedapi/types/fillmaskinferenceoptions.go +++ b/typedapi/types/fillmaskinferenceoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FillMaskInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L241-L249 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L241-L249 type FillMaskInferenceOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. 
NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -33,6 +43,55 @@ type FillMaskInferenceOptions struct { Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` } +func (s *FillMaskInferenceOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewFillMaskInferenceOptions returns a FillMaskInferenceOptions. func NewFillMaskInferenceOptions() *FillMaskInferenceOptions { r := &FillMaskInferenceOptions{} diff --git a/typedapi/types/fillmaskinferenceupdateoptions.go b/typedapi/types/fillmaskinferenceupdateoptions.go old mode 100755 new mode 100644 index 8d81fa2fb4..d3beab0e14 --- a/typedapi/types/fillmaskinferenceupdateoptions.go +++ b/typedapi/types/fillmaskinferenceupdateoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FillMaskInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L370-L377 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L370-L377 type FillMaskInferenceUpdateOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -33,6 +43,55 @@ type FillMaskInferenceUpdateOptions struct { Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` } +func (s *FillMaskInferenceUpdateOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewFillMaskInferenceUpdateOptions returns a FillMaskInferenceUpdateOptions. 
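// Editorial sketch of the token-driven loop shared by every UnmarshalJSON
// in this diff: json.Decoder.Token walks the object, each key is matched
// in a switch, and io.EOF ends the loop cleanly. The helper name is
// hypothetical.
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

func listKeys(data []byte) ([]string, error) {
	var keys []string
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return nil, err
		}
		// Top-level keys arrive as string tokens; the generated code
		// switches on them and then decodes the following value.
		if key, ok := t.(string); ok {
			keys = append(keys, key)
			// Skip the value so the next token is again a key.
			var skip json.RawMessage
			if err := dec.Decode(&skip); err != nil {
				return nil, err
			}
		}
	}
	return keys, nil
}

func main() {
	keys, _ := listKeys([]byte(`{"num_top_classes": 1, "results_field": "ml"}`))
	fmt.Println(keys) // [num_top_classes results_field]
}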
func NewFillMaskInferenceUpdateOptions() *FillMaskInferenceUpdateOptions { r := &FillMaskInferenceUpdateOptions{} diff --git a/typedapi/types/filteraggregate.go b/typedapi/types/filteraggregate.go old mode 100755 new mode 100644 index 5bacf91276..b339f4af9a --- a/typedapi/types/filteraggregate.go +++ b/typedapi/types/filteraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // FilterAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L494-L495 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L495-L496 type FilterAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *FilterAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := 
NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case 
"avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o 
:= NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + 
return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s FilterAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/filterref.go b/typedapi/types/filterref.go old mode 100755 new mode 100644 index 4f639709a2..2b19843376 --- a/typedapi/types/filterref.go +++ b/typedapi/types/filterref.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/filtertype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // FilterRef type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Filter.ts#L31-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Filter.ts#L31-L41 type FilterRef struct { // FilterId The identifier for the filter. 
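// Editorial sketch: in typed responses, sub-aggregations are keyed as
// "<kind>#<name>" (for example "filter#errors_only"), and the rewritten
// default branch above splits on "#" to pick the concrete Aggregate
// implementation. A hypothetical splitter:
package main

import (
	"fmt"
	"strings"
)

func splitAggKey(key string) (kind, name string, ok bool) {
	if !strings.Contains(key, "#") {
		return "", "", false
	}
	elems := strings.Split(key, "#")
	if len(elems) != 2 {
		return "", "", false
	}
	return elems[0], elems[1], true
}

func main() {
	kind, name, ok := splitAggKey("filter#errors_only")
	fmt.Println(kind, name, ok) // filter errors_only true
}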
FilterId string `json:"filter_id"` @@ -35,6 +41,36 @@ type FilterRef struct { FilterType *filtertype.FilterType `json:"filter_type,omitempty"` } +func (s *FilterRef) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filter_id": + if err := dec.Decode(&s.FilterId); err != nil { + return err + } + + case "filter_type": + if err := dec.Decode(&s.FilterType); err != nil { + return err + } + + } + } + return nil +} + // NewFilterRef returns a FilterRef. func NewFilterRef() *FilterRef { r := &FilterRef{} diff --git a/typedapi/types/filtersaggregate.go b/typedapi/types/filtersaggregate.go old mode 100755 new mode 100644 index ddef78e2b9..c3a767faaf --- a/typedapi/types/filtersaggregate.go +++ b/typedapi/types/filtersaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // FiltersAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L567-L568 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L568-L569 type FiltersAggregate struct { - Buckets BucketsFiltersBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsFiltersBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *FiltersAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *FiltersAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]FiltersBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []FiltersBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/filtersaggregation.go b/typedapi/types/filtersaggregation.go old mode 100755 new mode 100644 index 4e34ccc676..8ae7786f76 --- a/typedapi/types/filtersaggregation.go +++ b/typedapi/types/filtersaggregation.go @@ -16,24 +16,104 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // FiltersAggregation type. 
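// Editorial sketch of the keyed-vs-array handling added for "buckets":
// the decoder looks at the first byte of the raw value to decide between
// a map (keyed responses) and a slice. The bucket type here is a
// simplified stand-in, not the real FiltersBucket.
package main

import (
	"encoding/json"
	"fmt"
)

type bucket struct {
	DocCount int64 `json:"doc_count"`
}

func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	if len(raw) == 0 {
		return nil, fmt.Errorf("empty buckets payload")
	}
	switch raw[0] {
	case '{':
		o := make(map[string]bucket)
		err := json.Unmarshal(raw, &o)
		return o, err
	case '[':
		var o []bucket
		err := json.Unmarshal(raw, &o)
		return o, err
	}
	return nil, fmt.Errorf("unexpected first byte %q", raw[0])
}

func main() {
	keyed, _ := decodeBuckets(json.RawMessage(`{"errors":{"doc_count":3}}`))
	anon, _ := decodeBuckets(json.RawMessage(`[{"doc_count":3}]`))
	fmt.Println(keyed, anon) // map[errors:{3}] [{3}]
}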
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L169-L174 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L169-L174 type FiltersAggregation struct { - Filters *BucketsQuery `json:"filters,omitempty"` - Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - OtherBucket *bool `json:"other_bucket,omitempty"` - OtherBucketKey *string `json:"other_bucket_key,omitempty"` + Filters *BucketsQuery `json:"filters,omitempty"` + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + OtherBucket *bool `json:"other_bucket,omitempty"` + OtherBucketKey *string `json:"other_bucket_key,omitempty"` +} + +func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filters": + if err := dec.Decode(&s.Filters); err != nil { + return err + } + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "other_bucket": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OtherBucket = &value + case bool: + s.OtherBucket = &v + } + + case "other_bucket_key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.OtherBucketKey = &o + + } + } + return nil } // NewFiltersAggregation returns a FiltersAggregation. diff --git a/typedapi/types/filtersbucket.go b/typedapi/types/filtersbucket.go old mode 100755 new mode 100644 index e95f30dd32..c63abe0454 --- a/typedapi/types/filtersbucket.go +++ b/typedapi/types/filtersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,18 +29,21 @@ import ( "strings" + "strconv" + "encoding/json" ) // FiltersBucket type. 
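// Editorial sketch of the boolean handling used above for "keyed",
// "other_bucket" and similar flags: JSON booleans and the strings
// "true"/"false" are both accepted. The helper name is hypothetical.
package main

import (
	"fmt"
	"strconv"
)

func lenientBool(tmp interface{}) (*bool, error) {
	switch v := tmp.(type) {
	case string:
		value, err := strconv.ParseBool(v)
		if err != nil {
			return nil, err
		}
		return &value, nil
	case bool:
		return &v, nil
	}
	return nil, fmt.Errorf("unexpected JSON type %T", tmp)
}

func main() {
	a, _ := lenientBool(true)
	b, _ := lenientBool("false")
	fmt.Println(*a, *b) // true false
}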
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L570-L570 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L571-L571 type FiltersBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` } func (s *FiltersBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -54,453 +57,534 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case 
"stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return 
err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f + } + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if 
err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } 
+ s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + 
o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o } - s.Aggregations[value] = o + } else { + return errors.New("cannot decode JSON for field Aggregations") } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o } } - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err - } - } } return nil @@ -525,6 +609,7 @@ func (s FiltersBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/fingerprintanalyzer.go b/typedapi/types/fingerprintanalyzer.go old mode 100755 new mode 100644 index 734409d48a..cab070650e --- a/typedapi/types/fingerprintanalyzer.go +++ b/typedapi/types/fingerprintanalyzer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FingerprintAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L37-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L37-L45 type FingerprintAnalyzer struct { MaxOutputSize int `json:"max_output_size"` PreserveOriginal bool `json:"preserve_original"` @@ -33,6 +43,98 @@ type FingerprintAnalyzer struct { Version *string `json:"version,omitempty"` } +func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_output_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOutputSize = value + case float64: + f := int(v) + s.MaxOutputSize = f + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = value + case bool: + s.PreserveOriginal = v + } + + case "separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Separator = o + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "stopwords_path": + var tmp json.RawMessage 
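The FiltersBucket decoder in this change dispatches on response keys of the form `<aggregate-type>#<name>` (the `typed_keys` convention): the prefix selects the concrete aggregate constructor and the suffix becomes the key in the `Aggregations` map. A minimal sketch of how that plays out; the JSON fragment and the `price_avg` name are invented for illustration, not taken from the source.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical bucket fragment using the typed_keys convention: the
	// "avg#" prefix picks the concrete aggregate type, the "price_avg"
	// suffix becomes the map key.
	raw := []byte(`{"doc_count": 3, "avg#price_avg": {"value": 12.5}}`)

	var bucket types.FiltersBucket
	if err := json.Unmarshal(raw, &bucket); err != nil {
		panic(err)
	}

	// Expect doc_count=3 and a *types.AvgAggregate stored under "price_avg".
	fmt.Printf("doc_count=%d, price_avg is %T\n",
		bucket.DocCount, bucket.Aggregations["price_avg"])
}
```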
+ if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StopwordsPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewFingerprintAnalyzer returns a FingerprintAnalyzer. func NewFingerprintAnalyzer() *FingerprintAnalyzer { r := &FingerprintAnalyzer{} diff --git a/typedapi/types/fingerprinttokenfilter.go b/typedapi/types/fingerprinttokenfilter.go old mode 100755 new mode 100644 index 87e90acc72..5a214127a9 --- a/typedapi/types/fingerprinttokenfilter.go +++ b/typedapi/types/fingerprinttokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FingerprintTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L193-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L194-L198 type FingerprintTokenFilter struct { MaxOutputSize *int `json:"max_output_size,omitempty"` Separator *string `json:"separator,omitempty"` @@ -30,6 +40,60 @@ type FingerprintTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *FingerprintTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_output_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOutputSize = &value + case float64: + f := int(v) + s.MaxOutputSize = &f + } + + case "separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Separator = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewFingerprintTokenFilter returns a FingerprintTokenFilter. func NewFingerprintTokenFilter() *FingerprintTokenFilter { r := &FingerprintTokenFilter{} diff --git a/typedapi/types/flattened.go b/typedapi/types/flattened.go old mode 100755 new mode 100644 index 8e72a84914..998932b08b --- a/typedapi/types/flattened.go +++ b/typedapi/types/flattened.go @@ -16,19 +16,93 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Flattened type. 
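The FingerprintAnalyzer decoder added above normalizes `stopwords`, which Elasticsearch accepts either as a single string or as an array, into one slice field, and parses `max_output_size` whether it arrives quoted or as a number. A small sketch under those assumptions; the settings fragments are made up.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both shapes are valid analyzer settings; the decoder folds them
	// into the same Stopwords slice.
	single := []byte(`{"max_output_size": "255", "stopwords": "_english_"}`)
	list := []byte(`{"stopwords": ["a", "an", "the"]}`)

	var a, b types.FingerprintAnalyzer
	if err := json.Unmarshal(single, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(list, &b); err != nil {
		panic(err)
	}

	// The quoted max_output_size still lands in the int field; the scalar
	// stopword is wrapped into a one-element slice.
	fmt.Println(a.MaxOutputSize, len(a.Stopwords)) // 255 1
	fmt.Println(len(b.Stopwords))                  // 3
}
```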
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L347-L349 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L347-L349 type Flattened struct { Available bool `json:"available"` Enabled bool `json:"enabled"` FieldCount int `json:"field_count"` } +func (s *Flattened) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "field_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FieldCount = value + case float64: + f := int(v) + s.FieldCount = f + } + + } + } + return nil +} + // NewFlattened returns a Flattened. func NewFlattened() *Flattened { r := &Flattened{} diff --git a/typedapi/types/flattenedproperty.go b/typedapi/types/flattenedproperty.go old mode 100755 new mode 100644 index 3ee6d78683..6cd1163ebd --- a/typedapi/types/flattenedproperty.go +++ b/typedapi/types/flattenedproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // FlattenedProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/complex.ts#L26-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/complex.ts#L26-L37 type FlattenedProperty struct { Boost *Float64 `json:"boost,omitempty"` DepthLimit *int `json:"depth_limit,omitempty"` @@ -54,6 +56,7 @@ type FlattenedProperty struct { } func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -68,18 +71,49 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "depth_limit": - if err := dec.Decode(&s.DepthLimit); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DepthLimit = &value + case float64: + f := int(v) + s.DepthLimit = &f } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -88,11 +122,23 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - if err := dec.Decode(&s.EagerGlobalOrdinals); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EagerGlobalOrdinals = &value + case bool: + s.EagerGlobalOrdinals = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -380,20 +426,40 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "index_options": @@ -402,16 +468,25 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.NullValue = &o case "properties": + if s.Properties == nil { + 
s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -699,20 +774,32 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "split_queries_on_whitespace": - if err := dec.Decode(&s.SplitQueriesOnWhitespace); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SplitQueriesOnWhitespace = &value + case bool: + s.SplitQueriesOnWhitespace = &v } case "type": diff --git a/typedapi/types/float64.go b/typedapi/types/float64.go index 58e65ad2bb..8aff9d78b0 100644 --- a/typedapi/types/float64.go +++ b/typedapi/types/float64.go @@ -31,20 +31,23 @@ func (f Float64) MarshalJSON() ([]byte, error) { var s string switch { case math.IsInf(float64(f), 1): - s = "Infinity" + s = `"Infinity"` case math.IsInf(float64(f), -1): - s = "-Infinity" + s = `"-Infinity"` case math.IsNaN(float64(f)): - s = "NaN" + s = `"NaN"` default: s = strconv.FormatFloat(float64(f), 'f', -1, 64) } - return []byte(`"` + s + `"`), nil + return []byte(s), nil } // UnmarshalJSON implements Unmarshaler interface. func (f *Float64) UnmarshalJSON(data []byte) error { switch { + case bytes.Equal(data, []byte(`"NaN"`)): + nan := Float64(math.NaN()) + f = &nan case bytes.Equal(data, []byte(`null`)): return nil default: diff --git a/typedapi/types/floatnumberproperty.go b/typedapi/types/floatnumberproperty.go old mode 100755 new mode 100644 index 2b37191887..a4cd652923 --- a/typedapi/types/floatnumberproperty.go +++ b/typedapi/types/floatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // FloatNumberProperty type. 
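The Float64 change above stops quoting finite values: only the Infinity/-Infinity/NaN sentinels keep their quoted string form, while ordinary numbers are now emitted as plain JSON numbers. (Note that the new `"NaN"` branch in UnmarshalJSON reassigns the local receiver pointer, `f = &nan`, which appears to leave the caller's value untouched.) A quick illustration of the marshaling side, with the expected output noted in comments:

```go
package main

import (
	"encoding/json"
	"fmt"
	"math"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Finite values marshal as bare numbers; the IEEE-754 special values
	// fall back to the quoted sentinels Elasticsearch itself emits.
	for _, v := range []types.Float64{1.5, types.Float64(math.Inf(1)), types.Float64(math.NaN())} {
		b, err := json.Marshal(v)
		if err != nil {
			panic(err)
		}
		fmt.Println(string(b)) // 1.5, then "Infinity", then "NaN"
	}
}
```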
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L131-L134 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L131-L134 type FloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type FloatNumberProperty struct { } func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,35 +435,78 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := 
dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.NullValue = &f + case float64: + f := float32(v) + s.NullValue = &f } case "on_script_error": @@ -426,6 +515,9 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +805,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +817,39 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/floatrangeproperty.go b/typedapi/types/floatrangeproperty.go old mode 100755 new mode 100644 index 38e50838ea..59e9f257d2 --- a/typedapi/types/floatrangeproperty.go +++ b/typedapi/types/floatrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // FloatRangeProperty type. 
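As with the other property types in this change, FloatNumberProperty now tolerates the scalar and string variants of several mapping fields: `copy_to` may be a single string or an array, `null_value` may arrive quoted, and the boolean flags accept `"true"`/`"false"` strings. A sketch with invented field values:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical mapping fragment: copy_to is a bare string, null_value
	// and index are quoted; all three are accepted by the decoder above.
	raw := []byte(`{"copy_to": "all_prices", "null_value": "0.5", "index": "true"}`)

	var p types.FloatNumberProperty
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}

	fmt.Println(p.CopyTo)               // [all_prices]
	fmt.Println(*p.NullValue, *p.Index) // 0.5 true
}
```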
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/range.ts#L38-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/range.ts#L38-L40 type FloatRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -51,6 +53,7 @@ type FloatRangeProperty struct { } func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,23 +68,63 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -90,6 +133,9 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -377,28 +423,54 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +758,32 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { } 
s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/flushstats.go b/typedapi/types/flushstats.go old mode 100755 new mode 100644 index 16de8432f3..bfc611e6ed --- a/typedapi/types/flushstats.go +++ b/typedapi/types/flushstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FlushStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L81-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L81-L86 type FlushStats struct { Periodic int64 `json:"periodic"` Total int64 `json:"total"` @@ -30,6 +40,66 @@ type FlushStats struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *FlushStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "periodic": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Periodic = value + case float64: + f := int64(v) + s.Periodic = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewFlushStats returns a FlushStats. func NewFlushStats() *FlushStats { r := &FlushStats{} diff --git a/typedapi/types/followerindex.go b/typedapi/types/followerindex.go old mode 100755 new mode 100644 index 0e25f1b4f1..94aee64fb0 --- a/typedapi/types/followerindex.go +++ b/typedapi/types/followerindex.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/followerindexstatus" + + "bytes" + "errors" + "io" + + "encoding/json" ) // FollowerIndex type. 
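FlushStats follows the same lenient pattern for its counters: `periodic` and `total` are accepted either as JSON numbers or as quoted integers and both end up in the int64 fields. A minimal sketch with made-up values:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "periodic" is quoted, "total" is a plain number; both parse to int64.
	raw := []byte(`{"periodic": "12", "total": 40, "total_time_in_millis": 123}`)

	var fs types.FlushStats
	if err := json.Unmarshal(raw, &fs); err != nil {
		panic(err)
	}

	fmt.Println(fs.Periodic, fs.Total, fs.TotalTimeInMillis) // 12 40 123
}
```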
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow_info/types.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow_info/types.ts#L22-L28 type FollowerIndex struct { FollowerIndex string `json:"follower_index"` LeaderIndex string `json:"leader_index"` @@ -35,6 +41,51 @@ type FollowerIndex struct { Status followerindexstatus.FollowerIndexStatus `json:"status"` } +func (s *FollowerIndex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "follower_index": + if err := dec.Decode(&s.FollowerIndex); err != nil { + return err + } + + case "leader_index": + if err := dec.Decode(&s.LeaderIndex); err != nil { + return err + } + + case "parameters": + if err := dec.Decode(&s.Parameters); err != nil { + return err + } + + case "remote_cluster": + if err := dec.Decode(&s.RemoteCluster); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + } + } + return nil +} + // NewFollowerIndex returns a FollowerIndex. func NewFollowerIndex() *FollowerIndex { r := &FollowerIndex{} diff --git a/typedapi/types/followerindexparameters.go b/typedapi/types/followerindexparameters.go old mode 100755 new mode 100644 index 5c3bca5025..0790e94238 --- a/typedapi/types/followerindexparameters.go +++ b/typedapi/types/followerindexparameters.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FollowerIndexParameters type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/follow_info/types.ts#L38-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/follow_info/types.ts#L38-L49 type FollowerIndexParameters struct { MaxOutstandingReadRequests int `json:"max_outstanding_read_requests"` MaxOutstandingWriteRequests int `json:"max_outstanding_write_requests"` @@ -36,6 +46,140 @@ type FollowerIndexParameters struct { ReadPollTimeout Duration `json:"read_poll_timeout"` } +func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_outstanding_read_requests": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOutstandingReadRequests = value + case float64: + f := int(v) + s.MaxOutstandingReadRequests = f + } + + case "max_outstanding_write_requests": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOutstandingWriteRequests = value + case float64: + f := int(v) + s.MaxOutstandingWriteRequests = f + } + + case "max_read_request_operation_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxReadRequestOperationCount = value + case float64: + f := int(v) + s.MaxReadRequestOperationCount = f + } + + case "max_read_request_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxReadRequestSize = o + + case "max_retry_delay": + if err := dec.Decode(&s.MaxRetryDelay); err != nil { + return err + } + + case "max_write_buffer_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxWriteBufferCount = value + case float64: + f := int(v) + s.MaxWriteBufferCount = f + } + + case "max_write_buffer_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxWriteBufferSize = o + + case "max_write_request_operation_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxWriteRequestOperationCount = value + case float64: + f := int(v) + s.MaxWriteRequestOperationCount = f + } + + case "max_write_request_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxWriteRequestSize = o + + case "read_poll_timeout": + if err := dec.Decode(&s.ReadPollTimeout); err != nil { + return err + } + + } + } + return nil +} + // NewFollowerIndexParameters returns a FollowerIndexParameters. func NewFollowerIndexParameters() *FollowerIndexParameters { r := &FollowerIndexParameters{} diff --git a/typedapi/types/followindexstats.go b/typedapi/types/followindexstats.go old mode 100755 new mode 100644 index dd66c210ad..e784ccedff --- a/typedapi/types/followindexstats.go +++ b/typedapi/types/followindexstats.go @@ -16,18 +16,56 @@ // under the License. 
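FollowerIndexParameters gets the same treatment for its request and buffer counters: quoted integers go through strconv.Atoi, plain numbers through the float64 branch, and both land in the ordinary int fields. A sketch with illustrative values:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{
		"max_outstanding_read_requests": "12",
		"max_outstanding_write_requests": 9
	}`)

	var p types.FollowerIndexParameters
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}

	// Both counters decode the same way regardless of the JSON shape.
	fmt.Println(p.MaxOutstandingReadRequests, p.MaxOutstandingWriteRequests) // 12 9
}
```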
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // FollowIndexStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/_types/FollowIndexStats.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/_types/FollowIndexStats.ts#L30-L33 type FollowIndexStats struct { Index string `json:"index"` Shards []CcrShardStats `json:"shards"` } +func (s *FollowIndexStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "shards": + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + } + } + return nil +} + // NewFollowIndexStats returns a FollowIndexStats. func NewFollowIndexStats() *FollowIndexStats { r := &FollowIndexStats{} diff --git a/typedapi/types/followstats.go b/typedapi/types/followstats.go old mode 100755 new mode 100644 index 28d01ad3b1..f18501fb97 --- a/typedapi/types/followstats.go +++ b/typedapi/types/followstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // FollowStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/stats/types.ts.ts#L41-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/stats/types.ts.ts#L41-L43 type FollowStats struct { Indices []FollowIndexStats `json:"indices"` } diff --git a/typedapi/types/forcemergeconfiguration.go b/typedapi/types/forcemergeconfiguration.go old mode 100755 new mode 100644 index e1cfcb884d..e786eb16c8 --- a/typedapi/types/forcemergeconfiguration.go +++ b/typedapi/types/forcemergeconfiguration.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ForceMergeConfiguration type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Phase.ts#L53-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Phase.ts#L53-L55 type ForceMergeConfiguration struct { MaxNumSegments int `json:"max_num_segments"` } +func (s *ForceMergeConfiguration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_num_segments": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxNumSegments = value + case float64: + f := int(v) + s.MaxNumSegments = f + } + + } + } + return nil +} + // NewForceMergeConfiguration returns a ForceMergeConfiguration. func NewForceMergeConfiguration() *ForceMergeConfiguration { r := &ForceMergeConfiguration{} diff --git a/typedapi/types/forcemergeresponsebody.go b/typedapi/types/forcemergeresponsebody.go old mode 100755 new mode 100644 index edb467bbd8..52a1d6249b --- a/typedapi/types/forcemergeresponsebody.go +++ b/typedapi/types/forcemergeresponsebody.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ForceMergeResponseBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/forcemerge/_types/response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/forcemerge/_types/response.ts#L22-L28 type ForceMergeResponseBody struct { Shards_ ShardStatistics `json:"_shards"` // Task task contains a task id returned when wait_for_completion=false, diff --git a/typedapi/types/foreachprocessor.go b/typedapi/types/foreachprocessor.go old mode 100755 new mode 100644 index e367d66669..0b86c5a0b9 --- a/typedapi/types/foreachprocessor.go +++ b/typedapi/types/foreachprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ForeachProcessor type. 
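ForceMergeConfiguration (the ILM force-merge action) is a one-field example of the same idea: `max_num_segments` is accepted quoted or bare, and since the type keeps its plain struct tags for encoding, it re-marshals as a number. A short round-trip sketch:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A quoted value is accepted on input...
	var cfg types.ForceMergeConfiguration
	if err := json.Unmarshal([]byte(`{"max_num_segments": "1"}`), &cfg); err != nil {
		panic(err)
	}

	// ...and re-encodes as a plain number via the struct tags.
	out, err := json.Marshal(cfg)
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.MaxNumSegments, string(out)) // 1 {"max_num_segments":1}
}
```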
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L215-L219 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L215-L219 type ForeachProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type ForeachProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "processor": + if err := dec.Decode(&s.Processor); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewForeachProcessor returns a ForeachProcessor. func NewForeachProcessor() *ForeachProcessor { r := &ForeachProcessor{} diff --git a/typedapi/types/formattablemetricaggregation.go b/typedapi/types/formattablemetricaggregation.go old mode 100755 new mode 100644 index 2b5fd54a10..91c847859b --- a/typedapi/types/formattablemetricaggregation.go +++ b/typedapi/types/formattablemetricaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // FormattableMetricAggregation type. 
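ForeachProcessor applies the strconv.ParseBool fallback to its `ignore_failure` and `ignore_missing` flags, so pipeline definitions that quote the booleans still decode. A sketch; the field values are illustrative and the nested `processor` is omitted for brevity:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "ignore_missing" arrives as a quoted boolean and still parses.
	raw := []byte(`{"field": "values", "ignore_missing": "true"}`)

	var p types.ForeachProcessor
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}

	fmt.Println(p.Field, *p.IgnoreMissing) // values true
}
```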
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L44-L46 type FormattableMetricAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type FormattableMetricAggregation struct { Script Script `json:"script,omitempty"` } +func (s *FormattableMetricAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewFormattableMetricAggregation returns a FormattableMetricAggregation. func NewFormattableMetricAggregation() *FormattableMetricAggregation { r := &FormattableMetricAggregation{} diff --git a/typedapi/types/foundstatus.go b/typedapi/types/foundstatus.go old mode 100755 new mode 100644 index f04a77c86a..fb95577771 --- a/typedapi/types/foundstatus.go +++ b/typedapi/types/foundstatus.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FoundStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/delete_privileges/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/delete_privileges/types.ts#L20-L22 type FoundStatus struct { Found bool `json:"found"` } +func (s *FoundStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "found": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Found = value + case bool: + s.Found = v + } + + } + } + return nil +} + // NewFoundStatus returns a FoundStatus. func NewFoundStatus() *FoundStatus { r := &FoundStatus{} diff --git a/typedapi/types/frequencyencodingpreprocessor.go b/typedapi/types/frequencyencodingpreprocessor.go old mode 100755 new mode 100644 index 109d0e652a..5e9f22ec80 --- a/typedapi/types/frequencyencodingpreprocessor.go +++ b/typedapi/types/frequencyencodingpreprocessor.go @@ -16,19 +16,71 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
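FoundStatus is the value type of the delete-privileges response, so in practice it is decoded as the element of a nested map. A sketch under that assumption; the outer map shape mirrors that response, but the `myapp`/`read`/`write` payload is invented, and the second flag is deliberately quoted to exercise the ParseBool branch above:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"myapp": {"read": {"found": true}, "write": {"found": "false"}}}`)

	result := map[string]map[string]types.FoundStatus{}
	if err := json.Unmarshal(raw, &result); err != nil {
		panic(err)
	}

	fmt.Println(result["myapp"]["read"].Found, result["myapp"]["write"].Found) // true false
}
```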
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // FrequencyEncodingPreprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L38-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L38-L42 type FrequencyEncodingPreprocessor struct { FeatureName string `json:"feature_name"` Field string `json:"field"` FrequencyMap map[string]Float64 `json:"frequency_map"` } +func (s *FrequencyEncodingPreprocessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureName = o + + case "field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Field = o + + case "frequency_map": + if s.FrequencyMap == nil { + s.FrequencyMap = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.FrequencyMap); err != nil { + return err + } + + } + } + return nil +} + // NewFrequencyEncodingPreprocessor returns a FrequencyEncodingPreprocessor. func NewFrequencyEncodingPreprocessor() *FrequencyEncodingPreprocessor { r := &FrequencyEncodingPreprocessor{ diff --git a/typedapi/types/frequentitemsetsaggregate.go b/typedapi/types/frequentitemsetsaggregate.go new file mode 100644 index 0000000000..4242deb5cc --- /dev/null +++ b/typedapi/types/frequentitemsetsaggregate.go @@ -0,0 +1,90 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + +// FrequentItemSetsAggregate type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L639-L640 +type FrequentItemSetsAggregate struct { + Buckets BucketsFrequentItemSetsBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` +} + +func (s *FrequentItemSetsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buckets": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]FrequentItemSetsBucket, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Buckets = o + case '[': + o := []FrequentItemSetsBucket{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Buckets = o + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + } + } + return nil +} + +// NewFrequentItemSetsAggregate returns a FrequentItemSetsAggregate. +func NewFrequentItemSetsAggregate() *FrequentItemSetsAggregate { + r := &FrequentItemSetsAggregate{} + + return r +} diff --git a/typedapi/types/frequentitemsetsaggregation.go b/typedapi/types/frequentitemsetsaggregation.go new file mode 100644 index 0000000000..333c9850df --- /dev/null +++ b/typedapi/types/frequentitemsetsaggregation.go @@ -0,0 +1,132 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// FrequentItemSetsAggregation type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L551-L575 +type FrequentItemSetsAggregation struct { + // Fields Fields to analyze + Fields []FrequentItemSetsField `json:"fields"` + // Filter Query that filters documents from analysis. + Filter *Query `json:"filter,omitempty"` + // MinimumSetSize The minimum size of one item set. + MinimumSetSize *int `json:"minimum_set_size,omitempty"` + // MinimumSupport The minimum support of one item set. + MinimumSupport *Float64 `json:"minimum_support,omitempty"` + // Size The number of top item sets to return. 
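The new FrequentItemSetsAggregate stores its buckets behind the usual Buckets union: the decoder above assigns a `[]FrequentItemSetsBucket` when the JSON value is an array and a keyed map when it is an object, so callers type-assert on the concrete shape. A sketch with a fabricated response fragment, assuming the array form:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Invented aggregate body: one item set spanning two fields.
	raw := []byte(`{
		"buckets": [
			{"key": {"category": ["shoes"], "brand": ["acme"]}, "doc_count": 7, "support": 0.21}
		]
	}`)

	var agg types.FrequentItemSetsAggregate
	if err := json.Unmarshal(raw, &agg); err != nil {
		panic(err)
	}

	// The '[' branch in the decoder assigns a slice, so assert that shape here.
	if buckets, ok := agg.Buckets.([]types.FrequentItemSetsBucket); ok {
		b := buckets[0]
		fmt.Println(b.DocCount, b.Support, b.Key["category"]) // 7 0.21 [shoes]
	}
}
```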
+ Size *int `json:"size,omitempty"` +} + +func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "minimum_set_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinimumSetSize = &value + case float64: + f := int(v) + s.MinimumSetSize = &f + } + + case "minimum_support": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MinimumSupport = &f + case float64: + f := Float64(v) + s.MinimumSupport = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil +} + +// NewFrequentItemSetsAggregation returns a FrequentItemSetsAggregation. +func NewFrequentItemSetsAggregation() *FrequentItemSetsAggregation { + r := &FrequentItemSetsAggregation{} + + return r +} diff --git a/typedapi/types/frequentitemsetsbucket.go b/typedapi/types/frequentitemsetsbucket.go new file mode 100644 index 0000000000..6cfaf9eb77 --- /dev/null +++ b/typedapi/types/frequentitemsetsbucket.go @@ -0,0 +1,656 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "fmt" + + "bytes" + "errors" + "io" + + "strings" + + "strconv" + + "encoding/json" +) + +// FrequentItemSetsBucket type. 
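// minimum_set_size, minimum_support and size above accept both JSON numbers
// and numeric strings, which is why each case decodes into interface{} first
// and then branches on the concrete type. A minimal sketch of that coercion;
// the helper name is illustrative only, not part of the generated API.
package main

import (
    "encoding/json"
    "fmt"
    "strconv"
)

// intFromJSON accepts 5 or "5" and returns 5.
func intFromJSON(raw json.RawMessage) (int, error) {
    var tmp interface{}
    if err := json.Unmarshal(raw, &tmp); err != nil {
        return 0, err
    }
    switch v := tmp.(type) {
    case string:
        return strconv.Atoi(v)
    case float64: // encoding/json decodes every JSON number as float64
        return int(v), nil
    }
    return 0, fmt.Errorf("not a number: %s", raw)
}

func main() {
    a, _ := intFromJSON(json.RawMessage(`5`))
    b, _ := intFromJSON(json.RawMessage(`"5"`))
    fmt.Println(a, b) // 5 5
}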
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L642-L645 +type FrequentItemSetsBucket struct { + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Key map[string][]string `json:"key"` + Support Float64 `json:"support"` +} + +func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f + } + + case "key": + if s.Key == nil { + s.Key = make(map[string][]string, 0) + } + if err := dec.Decode(&s.Key); err != nil { + return err + } + + case "support": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Support = f + case float64: + f := Float64(v) + s.Support = f + } + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err 
!= nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err 
+ } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := 
NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + + } + } + return nil +} + +// MarhsalJSON overrides marshalling for types with additional properties +func (s FrequentItemSetsBucket) MarshalJSON() ([]byte, error) { + type opt FrequentItemSetsBucket + // We transform the struct to a map without the embedded additional properties map + tmp := make(map[string]interface{}, 0) + + data, err := json.Marshal(opt(s)) + if err != nil { + return nil, err + } + err = json.Unmarshal(data, &tmp) + if err != nil { + return nil, err + } + + // We inline the additional fields from the underlying map + for key, value := range s.Aggregations { + tmp[fmt.Sprintf("%s", key)] = value + } + delete(tmp, "Aggregations") + + data, err = json.Marshal(tmp) + if err != nil { + return nil, err + } + + return data, nil +} + +// NewFrequentItemSetsBucket returns a FrequentItemSetsBucket. +func NewFrequentItemSetsBucket() *FrequentItemSetsBucket { + r := &FrequentItemSetsBucket{ + Aggregations: make(map[string]Aggregate, 0), + Key: make(map[string][]string, 0), + } + + return r +} diff --git a/typedapi/types/frequentitemsetsfield.go b/typedapi/types/frequentitemsetsfield.go new file mode 100644 index 0000000000..4428ef142b --- /dev/null +++ b/typedapi/types/frequentitemsetsfield.go @@ -0,0 +1,102 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
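// The default branch in the bucket decoder above turns response keys of the
// form "type#name" (for example "avg#price_avg") into concrete aggregate
// values: the part before "#" selects the Go type, the part after it becomes
// the map key. A trimmed-down sketch of that dispatch, with hypothetical
// aggregate types standing in for the generated ones:
package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

type avgAggregate struct {
    Value float64 `json:"value"`
}

type maxAggregate struct {
    Value float64 `json:"value"`
}

func decodeSubAggs(raw map[string]json.RawMessage) (map[string]interface{}, error) {
    aggs := make(map[string]interface{})
    for key, value := range raw {
        elems := strings.Split(key, "#")
        if len(elems) != 2 {
            continue // not a typed aggregation key
        }
        var target interface{}
        switch elems[0] {
        case "avg":
            target = &avgAggregate{}
        case "max":
            target = &maxAggregate{}
        default:
            // Unknown prefixes fall back to a generic map, mirroring the
            // generated decoder's catch-all case.
            target = &map[string]interface{}{}
        }
        if err := json.Unmarshal(value, target); err != nil {
            return nil, err
        }
        aggs[elems[1]] = target
    }
    return aggs, nil
}

func main() {
    raw := map[string]json.RawMessage{
        "avg#price_avg": json.RawMessage(`{"value": 12.5}`),
    }
    aggs, _ := decodeSubAggs(raw)
    fmt.Printf("%+v\n", aggs["price_avg"]) // &{Value:12.5}
}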
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + +// FrequentItemSetsField type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L545-L549 +type FrequentItemSetsField struct { + Exclude []string `json:"exclude,omitempty"` + Field string `json:"field"` + Include []string `json:"include,omitempty"` +} + +func (s *FrequentItemSetsField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "exclude": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Exclude = append(s.Exclude, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { + return err + } + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "include": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Include = append(s.Include, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Include); err != nil { + return err + } + } + + } + } + return nil +} + +// NewFrequentItemSetsField returns a FrequentItemSetsField. +func NewFrequentItemSetsField() *FrequentItemSetsField { + r := &FrequentItemSetsField{} + + return r +} diff --git a/typedapi/types/frozenindices.go b/typedapi/types/frozenindices.go old mode 100755 new mode 100644 index 3c65a11557..7c1c9f543d --- a/typedapi/types/frozenindices.go +++ b/typedapi/types/frozenindices.go @@ -16,19 +16,92 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FrozenIndices type. 
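// The exclude and include fields above may arrive either as a single JSON
// string or as an array of strings, so the decoder checks for a leading '['
// and otherwise appends the single value. A standalone sketch of that
// normalisation; the helper name is illustrative only.
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

// stringOrSlice normalises "a" and ["a","b"] to a []string.
func stringOrSlice(raw json.RawMessage) ([]string, error) {
    if !bytes.HasPrefix(raw, []byte("[")) {
        var single string
        if err := json.Unmarshal(raw, &single); err != nil {
            return nil, err
        }
        return []string{single}, nil
    }
    var many []string
    if err := json.Unmarshal(raw, &many); err != nil {
        return nil, err
    }
    return many, nil
}

func main() {
    a, _ := stringOrSlice(json.RawMessage(`"error"`))
    b, _ := stringOrSlice(json.RawMessage(`["error","warn"]`))
    fmt.Println(a, b) // [error] [error warn]
}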
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L351-L353 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L351-L353 type FrozenIndices struct { Available bool `json:"available"` Enabled bool `json:"enabled"` IndicesCount int64 `json:"indices_count"` } +func (s *FrozenIndices) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "indices_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndicesCount = value + case float64: + f := int64(v) + s.IndicesCount = f + } + + } + } + return nil +} + // NewFrozenIndices returns a FrozenIndices. func NewFrozenIndices() *FrozenIndices { r := &FrozenIndices{} diff --git a/typedapi/types/functionscore.go b/typedapi/types/functionscore.go old mode 100755 new mode 100644 index ec339597fc..2ffad4171b --- a/typedapi/types/functionscore.go +++ b/typedapi/types/functionscore.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FunctionScore type. 
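// The available and enabled fields above tolerate both JSON booleans and the
// strings "true"/"false", presumably because some payloads render booleans as
// strings. A minimal sketch of that boolean coercion, with an illustrative
// helper name:
package main

import (
    "encoding/json"
    "fmt"
    "strconv"
)

func boolFromJSON(raw json.RawMessage) (bool, error) {
    var tmp interface{}
    if err := json.Unmarshal(raw, &tmp); err != nil {
        return false, err
    }
    switch v := tmp.(type) {
    case bool:
        return v, nil
    case string:
        return strconv.ParseBool(v)
    }
    return false, fmt.Errorf("not a boolean: %s", raw)
}

func main() {
    a, _ := boolFromJSON(json.RawMessage(`true`))
    b, _ := boolFromJSON(json.RawMessage(`"true"`))
    fmt.Println(a, b) // true true
}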
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L107-L127 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L107-L127 type FunctionScore struct { Exp DecayFunction `json:"exp,omitempty"` FieldValueFactor *FieldValueFactorScoreFunction `json:"field_value_factor,omitempty"` @@ -34,6 +44,77 @@ type FunctionScore struct { Weight *Float64 `json:"weight,omitempty"` } +func (s *FunctionScore) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "exp": + if err := dec.Decode(&s.Exp); err != nil { + return err + } + + case "field_value_factor": + if err := dec.Decode(&s.FieldValueFactor); err != nil { + return err + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "gauss": + if err := dec.Decode(&s.Gauss); err != nil { + return err + } + + case "linear": + if err := dec.Decode(&s.Linear); err != nil { + return err + } + + case "random_score": + if err := dec.Decode(&s.RandomScore); err != nil { + return err + } + + case "script_score": + if err := dec.Decode(&s.ScriptScore); err != nil { + return err + } + + case "weight": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Weight = &f + case float64: + f := Float64(v) + s.Weight = &f + } + + } + } + return nil +} + // NewFunctionScore returns a FunctionScore. func NewFunctionScore() *FunctionScore { r := &FunctionScore{} diff --git a/typedapi/types/functionscorequery.go b/typedapi/types/functionscorequery.go old mode 100755 new mode 100644 index 33fc263850..850ad23b82 --- a/typedapi/types/functionscorequery.go +++ b/typedapi/types/functionscorequery.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/functionboostmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/functionscoremode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // FunctionScoreQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L52-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L52-L59 type FunctionScoreQuery struct { Boost *float32 `json:"boost,omitempty"` BoostMode *functionboostmode.FunctionBoostMode `json:"boost_mode,omitempty"` @@ -39,6 +47,102 @@ type FunctionScoreQuery struct { ScoreMode *functionscoremode.FunctionScoreMode `json:"score_mode,omitempty"` } +func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "boost_mode": + if err := dec.Decode(&s.BoostMode); err != nil { + return err + } + + case "functions": + if err := dec.Decode(&s.Functions); err != nil { + return err + } + + case "max_boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MaxBoost = &f + case float64: + f := Float64(v) + s.MaxBoost = &f + } + + case "min_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MinScore = &f + case float64: + f := Float64(v) + s.MinScore = &f + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "score_mode": + if err := dec.Decode(&s.ScoreMode); err != nil { + return err + } + + } + } + return nil +} + // NewFunctionScoreQuery returns a FunctionScoreQuery. func NewFunctionScoreQuery() *FunctionScoreQuery { r := &FunctionScoreQuery{} diff --git a/typedapi/types/fuzziness.go b/typedapi/types/fuzziness.go old mode 100755 new mode 100644 index be470647f6..1e33ed8b78 --- a/typedapi/types/fuzziness.go +++ b/typedapi/types/fuzziness.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // int // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L114-L114 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L114-L114 type Fuzziness interface{} diff --git a/typedapi/types/fuzzyquery.go b/typedapi/types/fuzzyquery.go old mode 100755 new mode 100644 index bb31f19b74..56d6fcef3d --- a/typedapi/types/fuzzyquery.go +++ b/typedapi/types/fuzzyquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // FuzzyQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L40-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L40-L51 type FuzzyQuery struct { Boost *float32 `json:"boost,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` @@ -34,6 +44,119 @@ type FuzzyQuery struct { Value string `json:"value"` } +func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Value) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "max_expansions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxExpansions = &value + case float64: + f := int(v) + s.MaxExpansions = &f + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "rewrite": + if err := dec.Decode(&s.Rewrite); err != nil { + return err + } + + case "transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Transpositions = &value + case bool: + s.Transpositions = &v + } + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewFuzzyQuery returns a FuzzyQuery. func NewFuzzyQuery() *FuzzyQuery { r := &FuzzyQuery{} diff --git a/typedapi/types/garbagecollector.go b/typedapi/types/garbagecollector.go old mode 100755 new mode 100644 index 7d2a29a192..11883f75ac --- a/typedapi/types/garbagecollector.go +++ b/typedapi/types/garbagecollector.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // GarbageCollector type. 
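// The FuzzyQuery decoder above accepts a shorthand form: when the JSON value
// is not an object, the whole value is treated as the query's Value field.
// A standalone sketch of that object-or-scalar handling, using a hypothetical
// termLike type that is not part of the generated API:
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

type termLike struct {
    Value string  `json:"value"`
    Boost float32 `json:"boost,omitempty"`
}

func (t *termLike) UnmarshalJSON(data []byte) error {
    // Shorthand: the value arrives as a bare scalar such as "kimchy".
    if !bytes.HasPrefix(data, []byte(`{`)) {
        return json.Unmarshal(data, &t.Value)
    }
    // Long form: decode the full object without recursing into this method.
    type plain termLike
    var p plain
    if err := json.Unmarshal(data, &p); err != nil {
        return err
    }
    *t = termLike(p)
    return nil
}

func main() {
    var short, long termLike
    _ = json.Unmarshal([]byte(`"kimchy"`), &short)
    _ = json.Unmarshal([]byte(`{"value":"kimchy","boost":1.2}`), &long)
    fmt.Println(short.Value, long.Value, long.Boost) // kimchy kimchy 1.2
}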
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L363-L365 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L363-L365 type GarbageCollector struct { Collectors map[string]GarbageCollectorTotal `json:"collectors,omitempty"` } diff --git a/typedapi/types/garbagecollectortotal.go b/typedapi/types/garbagecollectortotal.go old mode 100755 new mode 100644 index b0442c5445..c62c51da5d --- a/typedapi/types/garbagecollectortotal.go +++ b/typedapi/types/garbagecollectortotal.go @@ -16,19 +16,87 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GarbageCollectorTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L367-L371 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L367-L371 type GarbageCollectorTotal struct { CollectionCount *int64 `json:"collection_count,omitempty"` CollectionTime *string `json:"collection_time,omitempty"` CollectionTimeInMillis *int64 `json:"collection_time_in_millis,omitempty"` } +func (s *GarbageCollectorTotal) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collection_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CollectionCount = &value + case float64: + f := int64(v) + s.CollectionCount = &f + } + + case "collection_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CollectionTime = &o + + case "collection_time_in_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CollectionTimeInMillis = &value + case float64: + f := int64(v) + s.CollectionTimeInMillis = &f + } + + } + } + return nil +} + // NewGarbageCollectorTotal returns a GarbageCollectorTotal. func NewGarbageCollectorTotal() *GarbageCollectorTotal { r := &GarbageCollectorTotal{} diff --git a/typedapi/types/geoboundingboxquery.go b/typedapi/types/geoboundingboxquery.go old mode 100755 new mode 100644 index 7f203fad18..813d7e7746 --- a/typedapi/types/geoboundingboxquery.go +++ b/typedapi/types/geoboundingboxquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,22 +24,107 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geoexecution" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geovalidationmethod" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // GeoBoundingBoxQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L32-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L32-L41 type GeoBoundingBoxQuery struct { Boost *float32 `json:"boost,omitempty"` - GeoBoundingBoxQuery map[string]GeoBounds `json:"-"` + GeoBoundingBoxQuery map[string]GeoBounds `json:"GeoBoundingBoxQuery,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` Type *geoexecution.GeoExecution `json:"type,omitempty"` ValidationMethod *geovalidationmethod.GeoValidationMethod `json:"validation_method,omitempty"` } +func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "GeoBoundingBoxQuery": + if s.GeoBoundingBoxQuery == nil { + s.GeoBoundingBoxQuery = make(map[string]GeoBounds, 0) + } + if err := dec.Decode(&s.GeoBoundingBoxQuery); err != nil { + return err + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "validation_method": + if err := dec.Decode(&s.ValidationMethod); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s GeoBoundingBoxQuery) MarshalJSON() ([]byte, error) { type opt GeoBoundingBoxQuery @@ -59,6 +144,7 @@ func (s GeoBoundingBoxQuery) MarshalJSON() ([]byte, error) { for key, value := range s.GeoBoundingBoxQuery { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "GeoBoundingBoxQuery") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geobounds.go b/typedapi/types/geobounds.go old mode 100755 new mode 100644 index c7a95cf6b0..0f84c13f53 --- a/typedapi/types/geobounds.go +++ b/typedapi/types/geobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
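// GeoBoundingBoxQuery above keeps its per-field body in a map and, when
// marshalling, inlines each map entry into the top-level object before
// deleting the placeholder key, so the wire format stays keyed by field name.
// A standalone sketch of that additional-properties flattening with a
// hypothetical fieldQuery type (names and payload are illustrative only):
package main

import (
    "encoding/json"
    "fmt"
)

type fieldQuery struct {
    Boost  *float32                          `json:"boost,omitempty"`
    Bodies map[string]map[string]interface{} `json:"Bodies,omitempty"`
}

func (q fieldQuery) MarshalJSON() ([]byte, error) {
    type plain fieldQuery // avoid recursing into this method
    data, err := json.Marshal(plain(q))
    if err != nil {
        return nil, err
    }
    tmp := make(map[string]interface{})
    if err := json.Unmarshal(data, &tmp); err != nil {
        return nil, err
    }
    // Inline the per-field bodies next to the regular properties, then drop
    // the placeholder key that came from the struct tag.
    for key, value := range q.Bodies {
        tmp[key] = value
    }
    delete(tmp, "Bodies")
    return json.Marshal(tmp)
}

func main() {
    boost := float32(1.5)
    q := fieldQuery{
        Boost: &boost,
        Bodies: map[string]map[string]interface{}{
            "pin.location": {"top_left": "dr5r", "bottom_right": "dr4h"},
        },
    }
    out, _ := json.Marshal(q)
    fmt.Println(string(out))
    // {"boost":1.5,"pin.location":{"bottom_right":"dr4h","top_left":"dr5r"}}
}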
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,5 +27,5 @@ package types // TopRightBottomLeftGeoBounds // WktGeoBounds // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L119-L132 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L119-L132 type GeoBounds interface{} diff --git a/typedapi/types/geoboundsaggregate.go b/typedapi/types/geoboundsaggregate.go old mode 100755 new mode 100644 index 59490ca906..ea4a088583 --- a/typedapi/types/geoboundsaggregate.go +++ b/typedapi/types/geoboundsaggregate.go @@ -16,20 +16,54 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // GeoBoundsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L302-L305 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L303-L306 type GeoBoundsAggregate struct { - Bounds GeoBounds `json:"bounds,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Bounds GeoBounds `json:"bounds,omitempty"` + Meta Metadata `json:"meta,omitempty"` +} + +func (s *GeoBoundsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bounds": + if err := dec.Decode(&s.Bounds); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + } + } + return nil } // NewGeoBoundsAggregate returns a GeoBoundsAggregate. diff --git a/typedapi/types/geoboundsaggregation.go b/typedapi/types/geoboundsaggregation.go old mode 100755 new mode 100644 index e469c559c0..56acb6fdec --- a/typedapi/types/geoboundsaggregation.go +++ b/typedapi/types/geoboundsaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GeoBoundsAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L72-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L72-L74 type GeoBoundsAggregation struct { Field *string `json:"field,omitempty"` Missing Missing `json:"missing,omitempty"` @@ -30,6 +40,55 @@ type GeoBoundsAggregation struct { WrapLongitude *bool `json:"wrap_longitude,omitempty"` } +func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "wrap_longitude": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.WrapLongitude = &value + case bool: + s.WrapLongitude = &v + } + + } + } + return nil +} + // NewGeoBoundsAggregation returns a GeoBoundsAggregation. func NewGeoBoundsAggregation() *GeoBoundsAggregation { r := &GeoBoundsAggregation{} diff --git a/typedapi/types/geocentroidaggregate.go b/typedapi/types/geocentroidaggregate.go old mode 100755 new mode 100644 index 3f85017e02..a0b7d00419 --- a/typedapi/types/geocentroidaggregate.go +++ b/typedapi/types/geocentroidaggregate.go @@ -16,21 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // GeoCentroidAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L307-L311 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L308-L312 type GeoCentroidAggregate struct { - Count int64 `json:"count"` - Location GeoLocation `json:"location,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Count int64 `json:"count"` + Location GeoLocation `json:"location,omitempty"` + Meta Metadata `json:"meta,omitempty"` +} + +func (s *GeoCentroidAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "location": + if err := dec.Decode(&s.Location); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + } + } + return nil } // NewGeoCentroidAggregate returns a GeoCentroidAggregate. 
diff --git a/typedapi/types/geocentroidaggregation.go b/typedapi/types/geocentroidaggregation.go old mode 100755 new mode 100644 index 8bf3e142e8..0eae8f65f4 --- a/typedapi/types/geocentroidaggregation.go +++ b/typedapi/types/geocentroidaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GeoCentroidAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L76-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L76-L79 type GeoCentroidAggregation struct { Count *int64 `json:"count,omitempty"` Field *string `json:"field,omitempty"` @@ -31,6 +41,61 @@ type GeoCentroidAggregation struct { Script Script `json:"script,omitempty"` } +func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "location": + if err := dec.Decode(&s.Location); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewGeoCentroidAggregation returns a GeoCentroidAggregation. func NewGeoCentroidAggregation() *GeoCentroidAggregation { r := &GeoCentroidAggregation{} diff --git a/typedapi/types/geodecayfunction.go b/typedapi/types/geodecayfunction.go old mode 100755 new mode 100644 index 726d161638..6f1ecad5bc --- a/typedapi/types/geodecayfunction.go +++ b/typedapi/types/geodecayfunction.go @@ -16,25 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // GeoDecayFunction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L96-L98 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L96-L98 type GeoDecayFunction struct { - GeoDecayFunction map[string]DecayPlacementGeoLocationDistance `json:"-"` + GeoDecayFunction map[string]DecayPlacementGeoLocationDistance `json:"GeoDecayFunction,omitempty"` MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` } +func (s *GeoDecayFunction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "GeoDecayFunction": + if s.GeoDecayFunction == nil { + s.GeoDecayFunction = make(map[string]DecayPlacementGeoLocationDistance, 0) + } + if err := dec.Decode(&s.GeoDecayFunction); err != nil { + return err + } + + case "multi_value_mode": + if err := dec.Decode(&s.MultiValueMode); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s GeoDecayFunction) MarshalJSON() ([]byte, error) { type opt GeoDecayFunction @@ -54,6 +94,7 @@ func (s GeoDecayFunction) MarshalJSON() ([]byte, error) { for key, value := range s.GeoDecayFunction { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "GeoDecayFunction") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geodistanceaggregate.go b/typedapi/types/geodistanceaggregate.go old mode 100755 new mode 100644 index 491ccabceb..22952a7111 --- a/typedapi/types/geodistanceaggregate.go +++ b/typedapi/types/geodistanceaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // GeoDistanceAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L549-L553 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L550-L554 type GeoDistanceAggregate struct { - Buckets BucketsRangeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsRangeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *GeoDistanceAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *GeoDistanceAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]RangeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []RangeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/geodistanceaggregation.go b/typedapi/types/geodistanceaggregation.go old mode 100755 new mode 100644 index 700b2e5bc1..69f45450f2 --- a/typedapi/types/geodistanceaggregation.go +++ b/typedapi/types/geodistanceaggregation.go @@ -16,30 +16,92 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/distanceunit" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geodistancetype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // GeoDistanceAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L176-L182 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L176-L182 type GeoDistanceAggregation struct { DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` Field *string `json:"field,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Origin GeoLocation `json:"origin,omitempty"` Ranges []AggregationRange `json:"ranges,omitempty"` Unit *distanceunit.DistanceUnit `json:"unit,omitempty"` } +func (s *GeoDistanceAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "distance_type": + if err := dec.Decode(&s.DistanceType); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "ranges": + if err := dec.Decode(&s.Ranges); err != nil { + return err + } + + case "unit": + if err := dec.Decode(&s.Unit); err != nil { + return err + } + + } + } + return nil +} + // NewGeoDistanceAggregation returns a GeoDistanceAggregation. func NewGeoDistanceAggregation() *GeoDistanceAggregation { r := &GeoDistanceAggregation{} diff --git a/typedapi/types/geodistancefeaturequery.go b/typedapi/types/geodistancefeaturequery.go old mode 100755 new mode 100644 index 4aba9705f9..1337baf4f6 --- a/typedapi/types/geodistancefeaturequery.go +++ b/typedapi/types/geodistancefeaturequery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GeoDistanceFeatureQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L46-L49 type GeoDistanceFeatureQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` @@ -31,6 +41,65 @@ type GeoDistanceFeatureQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *GeoDistanceFeatureQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return err + } + + case "pivot": + if err := dec.Decode(&s.Pivot); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewGeoDistanceFeatureQuery returns a GeoDistanceFeatureQuery. func NewGeoDistanceFeatureQuery() *GeoDistanceFeatureQuery { r := &GeoDistanceFeatureQuery{} diff --git a/typedapi/types/geodistancequery.go b/typedapi/types/geodistancequery.go old mode 100755 new mode 100644 index 6e8d3ec5bf..0daed66aa3 --- a/typedapi/types/geodistancequery.go +++ b/typedapi/types/geodistancequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,22 +24,98 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geodistancetype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geovalidationmethod" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // GeoDistanceQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L48-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L48-L57 type GeoDistanceQuery struct { Boost *float32 `json:"boost,omitempty"` Distance *string `json:"distance,omitempty"` DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` - GeoDistanceQuery map[string]GeoLocation `json:"-"` + GeoDistanceQuery map[string]GeoLocation `json:"GeoDistanceQuery,omitempty"` QueryName_ *string `json:"_name,omitempty"` ValidationMethod *geovalidationmethod.GeoValidationMethod `json:"validation_method,omitempty"` } +func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "distance": + if err := dec.Decode(&s.Distance); err != nil { + return err + } + + case "distance_type": + if err := dec.Decode(&s.DistanceType); err != nil { + return err + } + + case "GeoDistanceQuery": + if s.GeoDistanceQuery == nil { + s.GeoDistanceQuery = make(map[string]GeoLocation, 0) + } + if err := dec.Decode(&s.GeoDistanceQuery); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "validation_method": + if err := dec.Decode(&s.ValidationMethod); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s GeoDistanceQuery) MarshalJSON() ([]byte, error) { type opt GeoDistanceQuery @@ -59,6 +135,7 @@ func (s GeoDistanceQuery) MarshalJSON() ([]byte, error) { for key, value := range s.GeoDistanceQuery { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "GeoDistanceQuery") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geodistancesort.go b/typedapi/types/geodistancesort.go old mode 100755 new mode 100644 index 9614309273..9426213247 --- a/typedapi/types/geodistancesort.go +++ b/typedapi/types/geodistancesort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,22 +26,110 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortorder" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // GeoDistanceSort type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L58-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L58-L66 type GeoDistanceSort struct { DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` - GeoDistanceSort map[string][]GeoLocation `json:"-"` + GeoDistanceSort map[string][]GeoLocation `json:"GeoDistanceSort,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` Mode *sortmode.SortMode `json:"mode,omitempty"` Order *sortorder.SortOrder `json:"order,omitempty"` Unit *distanceunit.DistanceUnit `json:"unit,omitempty"` } +func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "distance_type": + if err := dec.Decode(&s.DistanceType); err != nil { + return err + } + + case "GeoDistanceSort": + if s.GeoDistanceSort == nil { + s.GeoDistanceSort = make(map[string][]GeoLocation, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := new(GeoLocation) + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.GeoDistanceSort[key] = append(s.GeoDistanceSort[key], o) + default: + o := []GeoLocation{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.GeoDistanceSort[key] = o + } + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "order": + if err := dec.Decode(&s.Order); err != nil { + return err + } + + case "unit": + if err := dec.Decode(&s.Unit); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s GeoDistanceSort) MarshalJSON() ([]byte, error) { type opt GeoDistanceSort @@ -61,6 +149,7 @@ func (s GeoDistanceSort) MarshalJSON() ([]byte, error) { for key, value := range s.GeoDistanceSort { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "GeoDistanceSort") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geohashgridaggregate.go b/typedapi/types/geohashgridaggregate.go old mode 100755 new mode 100644 index 66600fff0e..e1d7c3869b --- a/typedapi/types/geohashgridaggregate.go +++ b/typedapi/types/geohashgridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // GeoHashGridAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L505-L507 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L506-L508 type GeoHashGridAggregate struct { - Buckets BucketsGeoHashGridBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsGeoHashGridBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *GeoHashGridAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *GeoHashGridAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]GeoHashGridBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []GeoHashGridBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/geohashgridaggregation.go b/typedapi/types/geohashgridaggregation.go old mode 100755 new mode 100644 index d15e52c0b0..bfc9950683 --- a/typedapi/types/geohashgridaggregation.go +++ b/typedapi/types/geohashgridaggregation.go @@ -16,25 +16,111 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // GeoHashGridAggregation type. 
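Note on the GeoHashGridAggregate hunk above: the buckets union (keyed object vs. array) now propagates decode errors instead of silently discarding them. The sketch below shows how the union surfaces to a caller; it assumes the same typedapi/types package, and the payload is a made-up example.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Buckets arrive as a JSON array here, so the generated decoder stores
	// []types.GeoHashGridBucket in the Buckets union; a keyed object would
	// instead yield map[string]types.GeoHashGridBucket.
	src := []byte(`{"buckets":[{"key":"u0","doc_count":2}]}`)

	var agg types.GeoHashGridAggregate
	if err := json.Unmarshal(src, &agg); err != nil {
		log.Fatal(err)
	}

	if buckets, ok := agg.Buckets.([]types.GeoHashGridBucket); ok {
		fmt.Println(buckets[0].Key, buckets[0].DocCount) // u0 2
	}
}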
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L184-L190 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L184-L190 type GeoHashGridAggregation struct { - Bounds GeoBounds `json:"bounds,omitempty"` - Field *string `json:"field,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Precision GeoHashPrecision `json:"precision,omitempty"` - ShardSize *int `json:"shard_size,omitempty"` - Size *int `json:"size,omitempty"` + Bounds GeoBounds `json:"bounds,omitempty"` + Field *string `json:"field,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Precision GeoHashPrecision `json:"precision,omitempty"` + ShardSize *int `json:"shard_size,omitempty"` + Size *int `json:"size,omitempty"` +} + +func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bounds": + if err := dec.Decode(&s.Bounds); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "precision": + if err := dec.Decode(&s.Precision); err != nil { + return err + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil } // NewGeoHashGridAggregation returns a GeoHashGridAggregation. diff --git a/typedapi/types/geohashgridbucket.go b/typedapi/types/geohashgridbucket.go old mode 100755 new mode 100644 index fcd98053aa..f9b53d5742 --- a/typedapi/types/geohashgridbucket.go +++ b/typedapi/types/geohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // GeoHashGridBucket type. 
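The request-side GeoHashGridAggregation above gains the same leniency: size and shard_size may arrive as numbers or as quoted strings, and meta now decodes into the shared Metadata type. A minimal round-trip sketch under the same package assumption; the field name "location" is just an illustrative value.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// shard_size and size are quoted; the generated decoder falls back
	// to strconv.Atoi for the string form.
	src := []byte(`{"field":"location","precision":4,"shard_size":"100","size":"20"}`)

	var agg types.GeoHashGridAggregation
	if err := json.Unmarshal(src, &agg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(*agg.Field, *agg.ShardSize, *agg.Size) // location 100 20
}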
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L509-L511 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L510-L512 type GeoHashGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -42,6 +44,7 @@ type GeoHashGridBucket struct { } func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - 
if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": @@ -507,6 +78,519 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := 
NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s GeoHashGridBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geohashlocation.go b/typedapi/types/geohashlocation.go old mode 100755 new mode 100644 index 22a5a99c36..c37f534090 --- a/typedapi/types/geohashlocation.go +++ b/typedapi/types/geohashlocation.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GeoHashLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L115-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L115-L117 type GeoHashLocation struct { Geohash string `json:"geohash"` } +func (s *GeoHashLocation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "geohash": + if err := dec.Decode(&s.Geohash); err != nil { + return err + } + + } + } + return nil +} + // NewGeoHashLocation returns a GeoHashLocation. func NewGeoHashLocation() *GeoHashLocation { r := &GeoHashLocation{} diff --git a/typedapi/types/geohashprecision.go b/typedapi/types/geohashprecision.go old mode 100755 new mode 100644 index b4ec4d93c6..8023549bd6 --- a/typedapi/types/geohashprecision.go +++ b/typedapi/types/geohashprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
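For the grid bucket types rewritten above, sub-aggregation decoding moves from a literal "aggregations" key into the default branch: response keys of the form "<type>#<name>" (the shape produced by Elasticsearch's typed_keys option) select the concrete aggregate type, and the Aggregations map is now initialized lazily. A minimal reading sketch, assuming the same typedapi/types package; the "price" sub-aggregation name and the payload are invented.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "avg#price" is a typed key; the generated decoder strips the "avg#"
	// prefix and stores a *types.AvgAggregate under the bare name "price".
	src := []byte(`{"key":"u09","doc_count":3,"avg#price":{"value":10.5}}`)

	var bucket types.GeoHashGridBucket
	if err := json.Unmarshal(src, &bucket); err != nil {
		log.Fatal(err)
	}

	if avg, ok := bucket.Aggregations["price"].(*types.AvgAggregate); ok {
		fmt.Printf("doc_count=%d avg=%+v\n", bucket.DocCount, avg)
	}
}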
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L76-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L76-L80 type GeoHashPrecision interface{} diff --git a/typedapi/types/geohexgridaggregate.go b/typedapi/types/geohexgridaggregate.go old mode 100755 new mode 100644 index 2b9921c241..eb6d21ff12 --- a/typedapi/types/geohexgridaggregate.go +++ b/typedapi/types/geohexgridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // GeoHexGridAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L521-L522 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L522-L523 type GeoHexGridAggregate struct { - Buckets BucketsGeoHexGridBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsGeoHexGridBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *GeoHexGridAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *GeoHexGridAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]GeoHexGridBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []GeoHexGridBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/geohexgridaggregation.go b/typedapi/types/geohexgridaggregation.go old mode 100755 new mode 100644 index 0dd2b4e1e9..2770f584a2 --- a/typedapi/types/geohexgridaggregation.go +++ b/typedapi/types/geohexgridaggregation.go @@ -16,26 +16,32 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // GeohexGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L200-L226 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L200-L226 type GeohexGridAggregation struct { // Bounds Bounding box used to filter the geo-points in each bucket. 
Bounds GeoBounds `json:"bounds,omitempty"` // Field Field containing indexed geo-point values. Must be explicitly // mapped as a `geo_point` field. If the field contains an array // `geohex_grid` aggregates all array values. - Field string `json:"field"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Field string `json:"field"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` // Precision Integer zoom of the key used to defined cells or buckets // in the results. Value should be between 0-15. Precision *int `json:"precision,omitempty"` @@ -45,6 +51,97 @@ type GeohexGridAggregation struct { Size *int `json:"size,omitempty"` } +func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bounds": + if err := dec.Decode(&s.Bounds); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "precision": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Precision = &value + case float64: + f := int(v) + s.Precision = &f + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil +} + // NewGeohexGridAggregation returns a GeohexGridAggregation. func NewGeohexGridAggregation() *GeohexGridAggregation { r := &GeohexGridAggregation{} diff --git a/typedapi/types/geohexgridbucket.go b/typedapi/types/geohexgridbucket.go old mode 100755 new mode 100644 index 9d1f7e1726..49b6e922b9 --- a/typedapi/types/geohexgridbucket.go +++ b/typedapi/types/geohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // GeoHexGridBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L524-L526 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L525-L527 type GeoHexGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -42,6 +44,7 @@ type GeoHexGridBucket struct { } func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil 
{ - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - 
if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": @@ -507,6 +78,519 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := 
NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s GeoHexGridBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geoipdownloadstatistics.go b/typedapi/types/geoipdownloadstatistics.go old mode 100755 new mode 100644 index 807dd759a2..a69ffba6eb --- a/typedapi/types/geoipdownloadstatistics.go +++ b/typedapi/types/geoipdownloadstatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GeoIpDownloadStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/geo_ip_stats/types.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/geo_ip_stats/types.ts#L24-L35 type GeoIpDownloadStatistics struct { // DatabaseCount Current number of databases available for use. 
DatabaseCount int `json:"database_count"` @@ -36,6 +46,95 @@ type GeoIpDownloadStatistics struct { TotalDownloadTime int64 `json:"total_download_time"` } +func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "database_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DatabaseCount = value + case float64: + f := int(v) + s.DatabaseCount = f + } + + case "failed_downloads": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FailedDownloads = value + case float64: + f := int(v) + s.FailedDownloads = f + } + + case "skipped_updates": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SkippedUpdates = value + case float64: + f := int(v) + s.SkippedUpdates = f + } + + case "successful_downloads": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SuccessfulDownloads = value + case float64: + f := int(v) + s.SuccessfulDownloads = f + } + + case "total_download_time": + if err := dec.Decode(&s.TotalDownloadTime); err != nil { + return err + } + + } + } + return nil +} + // NewGeoIpDownloadStatistics returns a GeoIpDownloadStatistics. func NewGeoIpDownloadStatistics() *GeoIpDownloadStatistics { r := &GeoIpDownloadStatistics{} diff --git a/typedapi/types/geoipnodedatabasename.go b/typedapi/types/geoipnodedatabasename.go old mode 100755 new mode 100644 index a86424acbf..fa4c583327 --- a/typedapi/types/geoipnodedatabasename.go +++ b/typedapi/types/geoipnodedatabasename.go @@ -16,18 +16,51 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GeoIpNodeDatabaseName type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/geo_ip_stats/types.ts#L45-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/geo_ip_stats/types.ts#L45-L48 type GeoIpNodeDatabaseName struct { // Name Name of the database. Name string `json:"name"` } +func (s *GeoIpNodeDatabaseName) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewGeoIpNodeDatabaseName returns a GeoIpNodeDatabaseName. 
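The hand-written UnmarshalJSON above makes the integer counters of GeoIpDownloadStatistics tolerant of quoted numbers: a string value falls back to strconv.Atoi, while a JSON number is converted to int. A minimal caller-side sketch of that behaviour, assuming the usual github.com/elastic/go-elasticsearch/v8/typedapi/types import path; the sample payloads are illustrative only.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both payloads should decode to the same counters: the generated
	// decoder accepts the int fields either as numbers or as quoted strings.
	payloads := []string{
		`{"database_count": 5, "failed_downloads": 1, "skipped_updates": 0, "successful_downloads": 4, "total_download_time": 1200}`,
		`{"database_count": "5", "failed_downloads": "1", "skipped_updates": "0", "successful_downloads": "4", "total_download_time": 1200}`,
	}
	for _, p := range payloads {
		var stats types.GeoIpDownloadStatistics
		if err := json.Unmarshal([]byte(p), &stats); err != nil {
			panic(err)
		}
		fmt.Println(stats.DatabaseCount, stats.FailedDownloads, stats.SuccessfulDownloads)
	}
}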
func NewGeoIpNodeDatabaseName() *GeoIpNodeDatabaseName { r := &GeoIpNodeDatabaseName{} diff --git a/typedapi/types/geoipnodedatabases.go b/typedapi/types/geoipnodedatabases.go old mode 100755 new mode 100644 index a64b229531..c303312e46 --- a/typedapi/types/geoipnodedatabases.go +++ b/typedapi/types/geoipnodedatabases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // GeoIpNodeDatabases type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/geo_ip_stats/types.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/geo_ip_stats/types.ts#L37-L43 type GeoIpNodeDatabases struct { // Databases Downloaded databases for the node. Databases []GeoIpNodeDatabaseName `json:"databases"` diff --git a/typedapi/types/geoipprocessor.go b/typedapi/types/geoipprocessor.go old mode 100755 new mode 100644 index a0e520b8c1..c26ca3bb0c --- a/typedapi/types/geoipprocessor.go +++ b/typedapi/types/geoipprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GeoIpProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L106-L113 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L106-L113 type GeoIpProcessor struct { DatabaseFile *string `json:"database_file,omitempty"` Description *string `json:"description,omitempty"` @@ -37,6 +47,120 @@ type GeoIpProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "database_file": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DatabaseFile = &o + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "first_only": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FirstOnly = &value + case bool: + s.FirstOnly = &v + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "properties": + if err := dec.Decode(&s.Properties); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewGeoIpProcessor returns a GeoIpProcessor. func NewGeoIpProcessor() *GeoIpProcessor { r := &GeoIpProcessor{} diff --git a/typedapi/types/geoline.go b/typedapi/types/geoline.go old mode 100755 new mode 100644 index 7dbb8b5cc6..2a68010f06 --- a/typedapi/types/geoline.go +++ b/typedapi/types/geoline.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GeoLine type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L59-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L59-L65 type GeoLine struct { // Coordinates Array of `[lon, lat]` coordinates Coordinates [][]Float64 `json:"coordinates"` @@ -30,6 +38,39 @@ type GeoLine struct { Type string `json:"type"` } +func (s *GeoLine) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "coordinates": + if err := dec.Decode(&s.Coordinates); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewGeoLine returns a GeoLine. func NewGeoLine() *GeoLine { r := &GeoLine{} diff --git a/typedapi/types/geolineaggregate.go b/typedapi/types/geolineaggregate.go old mode 100755 new mode 100644 index 8e445da728..f94a269a2b --- a/typedapi/types/geolineaggregate.go +++ b/typedapi/types/geolineaggregate.go @@ -16,22 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // GeoLineAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L775-L782 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L784-L791 type GeoLineAggregate struct { - Geometry GeoLine `json:"geometry"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Properties json.RawMessage `json:"properties,omitempty"` - Type string `json:"type"` + Geometry GeoLine `json:"geometry"` + Meta Metadata `json:"meta,omitempty"` + Properties json.RawMessage `json:"properties,omitempty"` + Type string `json:"type"` +} + +func (s *GeoLineAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "geometry": + if err := dec.Decode(&s.Geometry); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "properties": + if err := dec.Decode(&s.Properties); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil } // NewGeoLineAggregate returns a GeoLineAggregate. diff --git a/typedapi/types/geolineaggregation.go b/typedapi/types/geolineaggregation.go old mode 100755 new mode 100644 index 7fd4843f5e..fa84825319 --- a/typedapi/types/geolineaggregation.go +++ b/typedapi/types/geolineaggregation.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortorder" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // GeoLineAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L81-L87 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L81-L87 type GeoLineAggregation struct { IncludeSort *bool `json:"include_sort,omitempty"` Point GeoLinePoint `json:"point"` @@ -35,6 +43,71 @@ type GeoLineAggregation struct { SortOrder *sortorder.SortOrder `json:"sort_order,omitempty"` } +func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "include_sort": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeSort = &value + case bool: + s.IncludeSort = &v + } + + case "point": + if err := dec.Decode(&s.Point); err != nil { + return err + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + case "sort_order": + if err := dec.Decode(&s.SortOrder); err != nil { + return err + } + + } + } + return nil +} + // NewGeoLineAggregation returns a GeoLineAggregation. func NewGeoLineAggregation() *GeoLineAggregation { r := &GeoLineAggregation{} diff --git a/typedapi/types/geolinepoint.go b/typedapi/types/geolinepoint.go old mode 100755 new mode 100644 index 203499c8df..7d37e2aba7 --- a/typedapi/types/geolinepoint.go +++ b/typedapi/types/geolinepoint.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GeoLinePoint type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L93-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L93-L95 type GeoLinePoint struct { Field string `json:"field"` } +func (s *GeoLinePoint) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + } + } + return nil +} + // NewGeoLinePoint returns a GeoLinePoint. 
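GeoLineAggregation above only customises decoding; requests still marshal through the ordinary struct tags. A small construction sketch, assuming the Sort and Size fields elided by the hunk keep the GeoLineSort and *int shapes implied by the decoder; the field names "location" and "@timestamp" are illustrative.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	includeSort := true
	size := 100

	agg := types.NewGeoLineAggregation()
	agg.Point = types.GeoLinePoint{Field: "location"}
	agg.Sort = types.GeoLineSort{Field: "@timestamp"}
	agg.IncludeSort = &includeSort
	agg.Size = &size

	body, err := json.Marshal(agg)
	if err != nil {
		panic(err)
	}
	// e.g. {"include_sort":true,"point":{"field":"location"},"size":100,"sort":{"field":"@timestamp"}}
	fmt.Println(string(body))
}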
func NewGeoLinePoint() *GeoLinePoint { r := &GeoLinePoint{} diff --git a/typedapi/types/geolinesort.go b/typedapi/types/geolinesort.go old mode 100755 new mode 100644 index c0a1c8bafb..80e77fbd54 --- a/typedapi/types/geolinesort.go +++ b/typedapi/types/geolinesort.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GeoLineSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L89-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L89-L91 type GeoLineSort struct { Field string `json:"field"` } +func (s *GeoLineSort) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + } + } + return nil +} + // NewGeoLineSort returns a GeoLineSort. func NewGeoLineSort() *GeoLineSort { r := &GeoLineSort{} diff --git a/typedapi/types/geolocation.go b/typedapi/types/geolocation.go old mode 100755 new mode 100644 index 7cdf224064..3f45bdeeda --- a/typedapi/types/geolocation.go +++ b/typedapi/types/geolocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,5 +27,5 @@ package types // []Float64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L94-L108 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L94-L108 type GeoLocation interface{} diff --git a/typedapi/types/geopointproperty.go b/typedapi/types/geopointproperty.go old mode 100755 new mode 100644 index c2c3bd2f4d..3cca38c6ad --- a/typedapi/types/geopointproperty.go +++ b/typedapi/types/geopointproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // GeoPointProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/geo.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/geo.ts#L23-L28 type GeoPointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -51,6 +53,7 @@ type GeoPointProperty struct { } func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,13 +68,33 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -80,6 +103,9 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -367,28 +393,60 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "ignore_z_value": - if err := dec.Decode(&s.IgnoreZValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreZValue = &value + case bool: + s.IgnoreZValue = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -399,6 +457,9 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +747,32 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := 
dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/geopolygonpoints.go b/typedapi/types/geopolygonpoints.go old mode 100755 new mode 100644 index 3d9a4542ac..ba5010fa85 --- a/typedapi/types/geopolygonpoints.go +++ b/typedapi/types/geopolygonpoints.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GeoPolygonPoints type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L59-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L59-L61 type GeoPolygonPoints struct { Points []GeoLocation `json:"points"` } +func (s *GeoPolygonPoints) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "points": + if err := dec.Decode(&s.Points); err != nil { + return err + } + + } + } + return nil +} + // NewGeoPolygonPoints returns a GeoPolygonPoints. func NewGeoPolygonPoints() *GeoPolygonPoints { r := &GeoPolygonPoints{} diff --git a/typedapi/types/geopolygonquery.go b/typedapi/types/geopolygonquery.go old mode 100755 new mode 100644 index 516df701fd..0f9dce2878 --- a/typedapi/types/geopolygonquery.go +++ b/typedapi/types/geopolygonquery.go @@ -16,28 +16,108 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geovalidationmethod" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // GeoPolygonQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L63-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L63-L71 type GeoPolygonQuery struct { Boost *float32 `json:"boost,omitempty"` - GeoPolygonQuery map[string]GeoPolygonPoints `json:"-"` + GeoPolygonQuery map[string]GeoPolygonPoints `json:"GeoPolygonQuery,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` ValidationMethod *geovalidationmethod.GeoValidationMethod `json:"validation_method,omitempty"` } +func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "GeoPolygonQuery": + if s.GeoPolygonQuery == nil { + s.GeoPolygonQuery = make(map[string]GeoPolygonPoints, 0) + } + if err := dec.Decode(&s.GeoPolygonQuery); err != nil { + return err + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "validation_method": + if err := dec.Decode(&s.ValidationMethod); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s GeoPolygonQuery) MarshalJSON() ([]byte, error) { type opt GeoPolygonQuery @@ -57,6 +137,7 @@ func (s GeoPolygonQuery) MarshalJSON() ([]byte, error) { for key, value := range s.GeoPolygonQuery { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "GeoPolygonQuery") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/georesults.go b/typedapi/types/georesults.go old mode 100755 new mode 100644 index 91129c5ce9..c034d3e20f --- a/typedapi/types/georesults.go +++ b/typedapi/types/georesults.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // GeoResults type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Anomaly.ts#L145-L154 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Anomaly.ts#L145-L154 type GeoResults struct { // ActualPoint The actual value for the bucket formatted as a `geo_point`. 
ActualPoint string `json:"actual_point"` diff --git a/typedapi/types/geoshape.go b/typedapi/types/geoshape.go old mode 100755 new mode 100644 index d4d83ba443..646bcd9263 --- a/typedapi/types/geoshape.go +++ b/typedapi/types/geoshape.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,5 +24,5 @@ import "encoding/json" // GeoShape type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L56-L57 -type GeoShape json.RawMessage +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L56-L57 +type GeoShape = json.RawMessage diff --git a/typedapi/types/geoshapefieldquery.go b/typedapi/types/geoshapefieldquery.go old mode 100755 new mode 100644 index cadd284882..d991e651e3 --- a/typedapi/types/geoshapefieldquery.go +++ b/typedapi/types/geoshapefieldquery.go @@ -16,25 +16,64 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geoshaperelation" + + "bytes" + "errors" + "io" + + "encoding/json" ) // GeoShapeFieldQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L78-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L78-L82 type GeoShapeFieldQuery struct { IndexedShape *FieldLookup `json:"indexed_shape,omitempty"` Relation *geoshaperelation.GeoShapeRelation `json:"relation,omitempty"` Shape json.RawMessage `json:"shape,omitempty"` } +func (s *GeoShapeFieldQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indexed_shape": + if err := dec.Decode(&s.IndexedShape); err != nil { + return err + } + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return err + } + + case "shape": + if err := dec.Decode(&s.Shape); err != nil { + return err + } + + } + } + return nil +} + // NewGeoShapeFieldQuery returns a GeoShapeFieldQuery. func NewGeoShapeFieldQuery() *GeoShapeFieldQuery { r := &GeoShapeFieldQuery{} diff --git a/typedapi/types/geoshapeproperty.go b/typedapi/types/geoshapeproperty.go old mode 100755 new mode 100644 index c3a5b6db48..a4e76588e2 --- a/typedapi/types/geoshapeproperty.go +++ b/typedapi/types/geoshapeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
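With GeoShape now an alias of json.RawMessage (type GeoShape = json.RawMessage above), raw GeoJSON can be dropped straight into GeoShapeFieldQuery.Shape, and the per-field map of the GeoShapeQuery container that appears further below is lifted to the top level by its MarshalJSON override, which now also strips the placeholder "GeoShapeQuery" key. A sketch under those assumptions; the "location" field and the envelope literal are illustrative only.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Thanks to the alias, a GeoShape and a json.RawMessage are interchangeable.
	var shape types.GeoShape = json.RawMessage(
		`{"type":"envelope","coordinates":[[13.0,53.0],[14.0,52.0]]}`)

	boost := float32(1.2)
	q := types.GeoShapeQuery{
		Boost: &boost,
		// The map key is the document field the shape is matched against.
		GeoShapeQuery: map[string]types.GeoShapeFieldQuery{
			"location": {Shape: shape},
		},
	}

	out, err := json.Marshal(q)
	if err != nil {
		panic(err)
	}
	// The per-field entry is flattened and the synthetic key dropped, e.g.
	// {"boost":1.2,"location":{"shape":{"type":"envelope","coordinates":[[13.0,53.0],[14.0,52.0]]}}}
	fmt.Println(string(out))
}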
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // GeoShapeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/geo.ts#L37-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/geo.ts#L37-L50 type GeoShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -55,6 +57,7 @@ type GeoShapeProperty struct { } func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -69,18 +72,47 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { switch t { case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -89,6 +121,9 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -376,28 +411,60 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "ignore_z_value": - if err := dec.Decode(&s.IgnoreZValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreZValue = &value + case bool: + s.IgnoreZValue = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := 
dec.Decode(&s.Meta); err != nil { return err } @@ -408,6 +475,9 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -695,20 +765,32 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "strategy": diff --git a/typedapi/types/geoshapequery.go b/typedapi/types/geoshapequery.go old mode 100755 new mode 100644 index c38cd2f3bc..46129b084e --- a/typedapi/types/geoshapequery.go +++ b/typedapi/types/geoshapequery.go @@ -16,25 +16,100 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // GeoShapeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/geo.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/geo.ts#L86-L91 type GeoShapeQuery struct { Boost *float32 `json:"boost,omitempty"` - GeoShapeQuery map[string]GeoShapeFieldQuery `json:"-"` + GeoShapeQuery map[string]GeoShapeFieldQuery `json:"GeoShapeQuery,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` } +func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "GeoShapeQuery": + if s.GeoShapeQuery == nil { + s.GeoShapeQuery = make(map[string]GeoShapeFieldQuery, 0) + } + if err := dec.Decode(&s.GeoShapeQuery); err != nil { + return err + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s 
GeoShapeQuery) MarshalJSON() ([]byte, error) { type opt GeoShapeQuery @@ -54,6 +129,7 @@ func (s GeoShapeQuery) MarshalJSON() ([]byte, error) { for key, value := range s.GeoShapeQuery { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "GeoShapeQuery") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/geotilegridaggregate.go b/typedapi/types/geotilegridaggregate.go old mode 100755 new mode 100644 index 8f9fc36932..aa615f7032 --- a/typedapi/types/geotilegridaggregate.go +++ b/typedapi/types/geotilegridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // GeoTileGridAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L513-L515 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L514-L516 type GeoTileGridAggregate struct { - Buckets BucketsGeoTileGridBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsGeoTileGridBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *GeoTileGridAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *GeoTileGridAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]GeoTileGridBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []GeoTileGridBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/geotilegridaggregation.go b/typedapi/types/geotilegridaggregation.go old mode 100755 new mode 100644 index dcfd846ca6..1ea802f5bb --- a/typedapi/types/geotilegridaggregation.go +++ b/typedapi/types/geotilegridaggregation.go @@ -16,25 +16,111 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // GeoTileGridAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L192-L198 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L192-L198 type GeoTileGridAggregation struct { - Bounds GeoBounds `json:"bounds,omitempty"` - Field *string `json:"field,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Precision *int `json:"precision,omitempty"` - ShardSize *int `json:"shard_size,omitempty"` - Size *int `json:"size,omitempty"` + Bounds GeoBounds `json:"bounds,omitempty"` + Field *string `json:"field,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Precision *int `json:"precision,omitempty"` + ShardSize *int `json:"shard_size,omitempty"` + Size *int `json:"size,omitempty"` +} + +func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bounds": + if err := dec.Decode(&s.Bounds); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "precision": + if err := dec.Decode(&s.Precision); err != nil { + return err + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil } // NewGeoTileGridAggregation returns a GeoTileGridAggregation. diff --git a/typedapi/types/geotilegridbucket.go b/typedapi/types/geotilegridbucket.go old mode 100755 new mode 100644 index 90ea44d242..ecae7c3b67 --- a/typedapi/types/geotilegridbucket.go +++ b/typedapi/types/geotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // GeoTileGridBucket type. 
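GeoTileGridAggregation above now types its meta as Metadata and accepts shard_size and size either as numbers or as quoted strings; the request side remains plain tag-based marshalling. A short sketch using the NewGeoTileGridAggregation constructor noted above; the field name and values are illustrative.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	field := "location"
	precision := 8
	size := 1000

	agg := types.NewGeoTileGridAggregation()
	agg.Field = &field
	agg.Precision = &precision
	agg.Size = &size

	body, err := json.Marshal(agg)
	if err != nil {
		panic(err)
	}
	// e.g. {"field":"location","precision":8,"size":1000}
	fmt.Println(string(body))
}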
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L517-L519 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L518-L520 type GeoTileGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -42,6 +44,7 @@ type GeoTileGridBucket struct { } func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - 
if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": @@ -507,6 +78,519 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := 
NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s GeoTileGridBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/getmigrationfeature.go b/typedapi/types/getmigrationfeature.go old mode 100755 new mode 100644 index eab3758076..58d9003614 --- a/typedapi/types/getmigrationfeature.go +++ b/typedapi/types/getmigrationfeature.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/migrationstatus" + + "bytes" + "errors" + "io" + + "encoding/json" ) // GetMigrationFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L37-L42 type GetMigrationFeature struct { FeatureName string `json:"feature_name"` Indices []MigrationFeatureIndexInfo `json:"indices"` @@ -34,6 +40,49 @@ type GetMigrationFeature struct { MinimumIndexVersion string `json:"minimum_index_version"` } +func (s *GetMigrationFeature) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureName = o + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "migration_status": + if err := dec.Decode(&s.MigrationStatus); err != nil { + return err + } + + case "minimum_index_version": + if err := dec.Decode(&s.MinimumIndexVersion); err != nil { + return err + } + + } + } + return nil +} + // NewGetMigrationFeature returns a GetMigrationFeature. 
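The GetMigrationFeature decoder added just above is the simplest instance of the token-loop pattern this diff introduces across the types package: read tokens until io.EOF, and when a known field name appears, hand the decoder straight to the target field. The sketch below is a standalone, simplified illustration of that shape; the featureStatus type and its two fields are hypothetical, not the generated code.

```go
// Minimal sketch of the token-loop UnmarshalJSON pattern used throughout
// this diff. The featureStatus type is hypothetical, for illustration only.
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

type featureStatus struct {
	Name    string   `json:"feature_name"`
	Indices []string `json:"indices"`
}

func (s *featureStatus) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break // whole object consumed
			}
			return err
		}
		// Keys arrive as string tokens; decoding the value right away keeps
		// the decoder positioned at the next key.
		switch t {
		case "feature_name":
			if err := dec.Decode(&s.Name); err != nil {
				return err
			}
		case "indices":
			if err := dec.Decode(&s.Indices); err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	var f featureStatus
	if err := json.Unmarshal([]byte(`{"feature_name":"security","indices":["a","b"]}`), &f); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", f)
}
```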
func NewGetMigrationFeature() *GetMigrationFeature { r := &GetMigrationFeature{} diff --git a/typedapi/types/getresult.go b/typedapi/types/getresult.go old mode 100755 new mode 100644 index 738cce40da..6aeeb3cecd --- a/typedapi/types/getresult.go +++ b/typedapi/types/getresult.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // GetResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get/types.ts#L25-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get/types.ts#L25-L35 type GetResult struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -39,6 +45,96 @@ type GetResult struct { Version_ *int64 `json:"_version,omitempty"` } +func (s *GetResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "found": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Found = value + case bool: + s.Found = v + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryTerm_ = &value + case float64: + f := int64(v) + s.PrimaryTerm_ = &f + } + + case "_routing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Routing_ = &o + + case "_seq_no": + if err := dec.Decode(&s.SeqNo_); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + case "_version": + if err := dec.Decode(&s.Version_); err != nil { + return err + } + + } + } + return nil +} + // NewGetResult returns a GetResult. func NewGetResult() *GetResult { r := &GetResult{ diff --git a/typedapi/types/getscriptcontext.go b/typedapi/types/getscriptcontext.go old mode 100755 new mode 100644 index 22d10f9ea3..26269be282 --- a/typedapi/types/getscriptcontext.go +++ b/typedapi/types/getscriptcontext.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // GetScriptContext type. 
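The GetResult decoder above is lenient about scalar types: `found` may arrive as a JSON boolean or as the string `"true"`, and `_primary_term` as a number or a quoted number, which is why each case decodes into an `interface{}` first and then branches on the concrete Go type. A condensed, standalone sketch of that dual path follows; the hitMeta type is hypothetical.

```go
// Standalone sketch of the lenient scalar decoding used in GetResult above:
// booleans and integers are accepted in native and in quoted form.
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

type hitMeta struct {
	Found       bool  `json:"found"`
	PrimaryTerm int64 `json:"_primary_term"`
}

func (s *hitMeta) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "found":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case bool: // native boolean
				s.Found = v
			case string: // quoted boolean, e.g. "true"
				b, err := strconv.ParseBool(v)
				if err != nil {
					return err
				}
				s.Found = b
			}
		case "_primary_term":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case float64: // plain JSON number
				s.PrimaryTerm = int64(v)
			case string: // quoted number, e.g. "3"
				n, err := strconv.ParseInt(v, 10, 64)
				if err != nil {
					return err
				}
				s.PrimaryTerm = n
			}
		}
	}
	return nil
}

func main() {
	var a, b hitMeta
	_ = json.Unmarshal([]byte(`{"found":true,"_primary_term":3}`), &a)
	_ = json.Unmarshal([]byte(`{"found":"true","_primary_term":"3"}`), &b)
	fmt.Printf("%+v %+v\n", a, b) // identical results
}
```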
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script_context/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script_context/types.ts#L22-L25 type GetScriptContext struct { Methods []ContextMethod `json:"methods"` Name string `json:"name"` } +func (s *GetScriptContext) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "methods": + if err := dec.Decode(&s.Methods); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewGetScriptContext returns a GetScriptContext. func NewGetScriptContext() *GetScriptContext { r := &GetScriptContext{} diff --git a/typedapi/types/getstats.go b/typedapi/types/getstats.go old mode 100755 new mode 100644 index f100b1dbbe..d0c7bae913 --- a/typedapi/types/getstats.go +++ b/typedapi/types/getstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GetStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L88-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L88-L99 type GetStats struct { Current int64 `json:"current"` ExistsTime Duration `json:"exists_time,omitempty"` @@ -36,6 +46,116 @@ type GetStats struct { Total int64 `json:"total"` } +func (s *GetStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Current = value + case float64: + f := int64(v) + s.Current = f + } + + case "exists_time": + if err := dec.Decode(&s.ExistsTime); err != nil { + return err + } + + case "exists_time_in_millis": + if err := dec.Decode(&s.ExistsTimeInMillis); err != nil { + return err + } + + case "exists_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ExistsTotal = value + case float64: + f := int64(v) + s.ExistsTotal = f + } + + case "missing_time": + if err := dec.Decode(&s.MissingTime); err != nil { + return err + } + + case "missing_time_in_millis": + if err := dec.Decode(&s.MissingTimeInMillis); err != nil { + return err + } + + case "missing_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MissingTotal = value + case float64: + f := int64(v) + s.MissingTotal = f + } + + case "time": + if err := 
dec.Decode(&s.Time); err != nil { + return err + } + + case "time_in_millis": + if err := dec.Decode(&s.TimeInMillis); err != nil { + return err + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewGetStats returns a GetStats. func NewGetStats() *GetStats { r := &GetStats{} diff --git a/typedapi/types/getuserprofileerrors.go b/typedapi/types/getuserprofileerrors.go old mode 100755 new mode 100644 index 13c3cf409f..3add0444f1 --- a/typedapi/types/getuserprofileerrors.go +++ b/typedapi/types/getuserprofileerrors.go @@ -16,18 +16,71 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GetUserProfileErrors type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_user_profile/types.ts#L25-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_user_profile/types.ts#L25-L28 type GetUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` } +func (s *GetUserProfileErrors) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "details": + if s.Details == nil { + s.Details = make(map[string]ErrorCause, 0) + } + if err := dec.Decode(&s.Details); err != nil { + return err + } + + } + } + return nil +} + // NewGetUserProfileErrors returns a GetUserProfileErrors. func NewGetUserProfileErrors() *GetUserProfileErrors { r := &GetUserProfileErrors{ diff --git a/typedapi/types/globalaggregate.go b/typedapi/types/globalaggregate.go old mode 100755 new mode 100644 index 88d13b84ea..2991d8e16d --- a/typedapi/types/globalaggregate.go +++ b/typedapi/types/globalaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // GlobalAggregate type. 
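Both the GeoTileGridBucket decoder earlier in this diff and the GlobalAggregate decoder that follows route sub-aggregations by splitting the response key on `#`, since the typed client receives aggregations under `<type>#<name>` keys (for example `sterms#genres`). The sketch below is a simplified, standalone illustration of that routing; the local aggregate types and the two-case switch are assumptions for the example, whereas the generated switch covers every aggregate variant.

```go
// Standalone sketch of "#"-prefixed typed-keys routing, as used by the
// generated aggregate decoders. The aggregate types here are local stand-ins.
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type stringTerms struct {
	Buckets []struct {
		Key      string `json:"key"`
		DocCount int64  `json:"doc_count"`
	} `json:"buckets"`
}

type maxValue struct {
	Value float64 `json:"value"`
}

// decodeAggs routes each "<type>#<name>" entry to a concrete Go type,
// falling back to a generic map when the prefix is unknown.
func decodeAggs(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{}, len(raw))
	for key, msg := range raw {
		typ, name, ok := strings.Cut(key, "#")
		if !ok {
			// No typed prefix: keep the raw payload under the original key.
			out[key] = msg
			continue
		}
		switch typ {
		case "sterms":
			var a stringTerms
			if err := json.Unmarshal(msg, &a); err != nil {
				return nil, err
			}
			out[name] = a
		case "max":
			var a maxValue
			if err := json.Unmarshal(msg, &a); err != nil {
				return nil, err
			}
			out[name] = a
		default:
			var a map[string]interface{}
			if err := json.Unmarshal(msg, &a); err != nil {
				return nil, err
			}
			out[name] = a
		}
	}
	return out, nil
}

func main() {
	payload := []byte(`{"sterms#genres":{"buckets":[{"key":"rock","doc_count":3}]},"max#top_price":{"value":19.99}}`)
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(payload, &raw); err != nil {
		panic(err)
	}
	aggs, err := decodeAggs(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", aggs)
}
```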
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L491-L492 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L492-L493 type GlobalAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err 
:= dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := 
NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s GlobalAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/globalaggregation.go b/typedapi/types/globalaggregation.go old mode 100755 new mode 100644 index 8a70b8bcd0..0944e4f418 --- a/typedapi/types/globalaggregation.go +++ b/typedapi/types/globalaggregation.go @@ -16,20 +16,57 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // GlobalAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L228-L228 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L228-L228 type GlobalAggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *GlobalAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil } // NewGlobalAggregation returns a GlobalAggregation. diff --git a/typedapi/types/globalprivilege.go b/typedapi/types/globalprivilege.go old mode 100755 new mode 100644 index d8c0a13caf..756e5db2d4 --- a/typedapi/types/globalprivilege.go +++ b/typedapi/types/globalprivilege.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
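Once decoded, each entry in the Aggregations map holds a concrete aggregate pointer under its plain name, so a caller can recover the specific type with a type switch. The sketch below mirrors that usage with local stand-in types rather than the generated ones, so the names are illustrative only.

```go
// Sketch of reading typed sub-aggregations back out of a name-keyed map via
// a type switch. These aggregate types are local stand-ins.
package main

import "fmt"

type maxAggregate struct{ Value float64 }

type termsBucket struct {
	Key      string
	DocCount int64
}

type stringTermsAggregate struct{ Buckets []termsBucket }

func describe(aggs map[string]interface{}, name string) string {
	switch a := aggs[name].(type) {
	case *maxAggregate:
		return fmt.Sprintf("%s: max=%.2f", name, a.Value)
	case *stringTermsAggregate:
		return fmt.Sprintf("%s: %d terms buckets", name, len(a.Buckets))
	case nil:
		return name + ": not present"
	default:
		return fmt.Sprintf("%s: unhandled aggregate %T", name, a)
	}
}

func main() {
	aggs := map[string]interface{}{
		"top_price": &maxAggregate{Value: 19.99},
		"genres":    &stringTermsAggregate{Buckets: []termsBucket{{Key: "rock", DocCount: 3}}},
	}
	fmt.Println(describe(aggs, "top_price"))
	fmt.Println(describe(aggs, "genres"))
	fmt.Println(describe(aggs, "missing"))
}
```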
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // GlobalPrivilege type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L187-L189 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L189-L191 type GlobalPrivilege struct { Application ApplicationGlobalUserPrivileges `json:"application"` } diff --git a/typedapi/types/googlenormalizeddistanceheuristic.go b/typedapi/types/googlenormalizeddistanceheuristic.go old mode 100755 new mode 100644 index e4a5d0e406..fdb10a3d95 --- a/typedapi/types/googlenormalizeddistanceheuristic.go +++ b/typedapi/types/googlenormalizeddistanceheuristic.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GoogleNormalizedDistanceHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L327-L329 type GoogleNormalizedDistanceHeuristic struct { BackgroundIsSuperset *bool `json:"background_is_superset,omitempty"` } +func (s *GoogleNormalizedDistanceHeuristic) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "background_is_superset": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.BackgroundIsSuperset = &value + case bool: + s.BackgroundIsSuperset = &v + } + + } + } + return nil +} + // NewGoogleNormalizedDistanceHeuristic returns a GoogleNormalizedDistanceHeuristic. func NewGoogleNormalizedDistanceHeuristic() *GoogleNormalizedDistanceHeuristic { r := &GoogleNormalizedDistanceHeuristic{} diff --git a/typedapi/types/grantapikey.go b/typedapi/types/grantapikey.go old mode 100755 new mode 100644 index 935abb5d55..520b3330d8 --- a/typedapi/types/grantapikey.go +++ b/typedapi/types/grantapikey.go @@ -16,24 +16,84 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // GrantApiKey type. 
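GoogleNormalizedDistanceHeuristic above keeps its flag as a `*bool` with `omitempty`, so an absent field stays distinguishable from an explicit `false` after decoding. The snippet below demonstrates that behaviour in isolation; the heuristicOptions type is a hypothetical stand-in, not the generated one.

```go
// Sketch of a pointer-typed optional flag: a nil pointer means the field was
// absent, which is distinct from an explicit false.
package main

import (
	"encoding/json"
	"fmt"
)

type heuristicOptions struct {
	BackgroundIsSuperset *bool `json:"background_is_superset,omitempty"`
}

func main() {
	var absent, explicit heuristicOptions
	_ = json.Unmarshal([]byte(`{}`), &absent)
	_ = json.Unmarshal([]byte(`{"background_is_superset":false}`), &explicit)

	fmt.Println(absent.BackgroundIsSuperset == nil) // true: field was never sent
	fmt.Println(*explicit.BackgroundIsSuperset)     // false: caller said so

	// omitempty drops the nil pointer on the way back out, so the absent case
	// re-serializes as {} rather than {"background_is_superset":null}.
	out, _ := json.Marshal(absent)
	fmt.Println(string(out)) // {}
}
```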
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/grant_api_key/types.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/grant_api_key/types.ts#L25-L32 type GrantApiKey struct { Expiration *string `json:"expiration,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` Name string `json:"name"` RoleDescriptors []map[string]RoleDescriptor `json:"role_descriptors,omitempty"` } +func (s *GrantApiKey) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expiration": + if err := dec.Decode(&s.Expiration); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "role_descriptors": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]RoleDescriptor, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.RoleDescriptors = append(s.RoleDescriptors, o) + case '[': + o := make([]map[string]RoleDescriptor, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.RoleDescriptors = o + } + + } + } + return nil +} + // NewGrantApiKey returns a GrantApiKey. func NewGrantApiKey() *GrantApiKey { r := &GrantApiKey{} diff --git a/typedapi/types/grokprocessor.go b/typedapi/types/grokprocessor.go old mode 100755 new mode 100644 index b7276c3b62..fdffa723d3 --- a/typedapi/types/grokprocessor.go +++ b/typedapi/types/grokprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GrokProcessor type. 
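The role_descriptors case above sniffs the first byte of the raw value so that a single descriptor object and an array of descriptors both end up in the same slice. A standalone sketch of that object-or-array normalization follows; the descriptor and grantRequest types are hypothetical stand-ins.

```go
// Sketch of object-or-array normalization: peek at the first byte of the raw
// value, then decode accordingly.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type descriptor struct {
	Cluster []string `json:"cluster"`
}

type grantRequest struct {
	RoleDescriptors []map[string]descriptor `json:"role_descriptors,omitempty"`
}

func (g *grantRequest) UnmarshalJSON(data []byte) error {
	var fields struct {
		RoleDescriptors json.RawMessage `json:"role_descriptors"`
	}
	if err := json.Unmarshal(data, &fields); err != nil {
		return err
	}
	raw := bytes.TrimSpace(fields.RoleDescriptors)
	if len(raw) == 0 || bytes.Equal(raw, []byte("null")) {
		return nil // field absent
	}
	switch raw[0] {
	case '{': // single object: wrap it in a one-element slice
		o := make(map[string]descriptor)
		if err := json.Unmarshal(raw, &o); err != nil {
			return err
		}
		g.RoleDescriptors = append(g.RoleDescriptors, o)
	case '[': // already a list
		if err := json.Unmarshal(raw, &g.RoleDescriptors); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	var single, many grantRequest
	_ = json.Unmarshal([]byte(`{"role_descriptors":{"admin":{"cluster":["all"]}}}`), &single)
	_ = json.Unmarshal([]byte(`{"role_descriptors":[{"read":{"cluster":["monitor"]}}]}`), &many)
	fmt.Println(len(single.RoleDescriptors), len(many.RoleDescriptors)) // 1 1
}
```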
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L221-L227 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L221-L227 type GrokProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -36,6 +46,115 @@ type GrokProcessor struct { TraceMatch *bool `json:"trace_match,omitempty"` } +func (s *GrokProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "pattern_definitions": + if s.PatternDefinitions == nil { + s.PatternDefinitions = make(map[string]string, 0) + } + if err := dec.Decode(&s.PatternDefinitions); err != nil { + return err + } + + case "patterns": + if err := dec.Decode(&s.Patterns); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "trace_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TraceMatch = &value + case bool: + s.TraceMatch = &v + } + + } + } + return nil +} + // NewGrokProcessor returns a GrokProcessor. func NewGrokProcessor() *GrokProcessor { r := &GrokProcessor{ diff --git a/typedapi/types/groupings.go b/typedapi/types/groupings.go old mode 100755 new mode 100644 index 39201684fb..973bfa90d4 --- a/typedapi/types/groupings.go +++ b/typedapi/types/groupings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Groupings type. 
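For context on what the new GrokProcessor decoder consumes, a grok ingest-processor body carries the target field, one or more patterns, and optional custom pattern definitions. The sketch decodes a representative body into a local mirror of those fields; the grokConfig struct is illustrative only, not the generated type.

```go
// Sketch: decoding a representative grok ingest-processor body into a local
// mirror of the fields the generated GrokProcessor type exposes.
package main

import (
	"encoding/json"
	"fmt"
)

type grokConfig struct {
	Field              string            `json:"field"`
	Patterns           []string          `json:"patterns"`
	PatternDefinitions map[string]string `json:"pattern_definitions,omitempty"`
	IgnoreMissing      *bool             `json:"ignore_missing,omitempty"`
}

func main() {
	body := []byte(`{
		"field": "message",
		"patterns": ["%{FAVORITE_DOG:pet} is on the loose"],
		"pattern_definitions": {"FAVORITE_DOG": "beagle"},
		"ignore_missing": true
	}`)

	var cfg grokConfig
	if err := json.Unmarshal(body, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("field=%s patterns=%d custom=%d\n",
		cfg.Field, len(cfg.Patterns), len(cfg.PatternDefinitions))
}
```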
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/_types/Groupings.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/_types/Groupings.ts#L24-L28 type Groupings struct { DateHistogram *DateHistogramGrouping `json:"date_histogram,omitempty"` Histogram *HistogramGrouping `json:"histogram,omitempty"` diff --git a/typedapi/types/gsubprocessor.go b/typedapi/types/gsubprocessor.go old mode 100755 new mode 100644 index b46afc437d..41ecd09b7b --- a/typedapi/types/gsubprocessor.go +++ b/typedapi/types/gsubprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // GsubProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L229-L235 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L229-L235 type GsubProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -36,6 +46,109 @@ type GsubProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *GsubProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "replacement": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Replacement = o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewGsubProcessor returns a GsubProcessor. 
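One detail to keep in mind when reading the string cases above: decoding into a json.RawMessage and converting with string() preserves the value exactly as it appears on the wire, surrounding quotes included. The snippet below just demonstrates that Go behaviour in isolation; strconv.Unquote is shown as one way to recover the bare text if a caller needs it.

```go
// Demonstrates, in isolation, what string(json.RawMessage) yields for a JSON
// string value: the quotes are part of the captured text.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	var tmp json.RawMessage
	if err := json.Unmarshal([]byte(`"lowercase"`), &tmp); err != nil {
		panic(err)
	}

	asIs := string(tmp)
	fmt.Println(asIs) // "lowercase" (with quotes)

	bare, err := strconv.Unquote(asIs) // one way to recover the plain text
	if err != nil {
		panic(err)
	}
	fmt.Println(bare) // lowercase
}
```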
func NewGsubProcessor() *GsubProcessor { r := &GsubProcessor{} diff --git a/typedapi/types/halffloatnumberproperty.go b/typedapi/types/halffloatnumberproperty.go old mode 100755 new mode 100644 index b54ba0c08c..5854d79649 --- a/typedapi/types/halffloatnumberproperty.go +++ b/typedapi/types/halffloatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // HalfFloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L136-L139 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L136-L139 type HalfFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type HalfFloatNumberProperty struct { } func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,35 +435,78 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + 
return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.NullValue = &f + case float64: + f := float32(v) + s.NullValue = &f } case "on_script_error": @@ -426,6 +515,9 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +805,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +817,39 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/haschildquery.go b/typedapi/types/haschildquery.go old mode 100755 new mode 100644 index fd1a1f791e..0c4d4f76fc --- a/typedapi/types/haschildquery.go +++ b/typedapi/types/haschildquery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/childscoremode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // HasChildQuery type. 
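The HalfFloatNumberProperty hunk above also changes how copy_to is read: a single string is now wrapped into a one-element slice, while an array is decoded as-is, matching the two shapes Elasticsearch accepts for that mapping option. A reduced sketch of that branch (decodeStringOrSlice is a hypothetical helper, not part of the generated code):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// decodeStringOrSlice mirrors the generated handling of "copy_to": a bare
// string is wrapped into a one-element slice, a JSON array is decoded as-is.
func decodeStringOrSlice(raw json.RawMessage) ([]string, error) {
	if !bytes.HasPrefix(raw, []byte("[")) {
		var s string
		if err := json.Unmarshal(raw, &s); err != nil {
			return nil, err
		}
		return []string{s}, nil
	}
	var out []string
	if err := json.Unmarshal(raw, &out); err != nil {
		return nil, err
	}
	return out, nil
}

func main() {
	a, _ := decodeStringOrSlice(json.RawMessage(`"other_field"`))
	b, _ := decodeStringOrSlice(json.RawMessage(`["f1","f2"]`))
	fmt.Println(a, b) // [other_field] [f1 f2]
}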
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/joining.ts#L41-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/joining.ts#L41-L51 type HasChildQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` @@ -39,6 +47,116 @@ type HasChildQuery struct { Type string `json:"type"` } +func (s *HasChildQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "inner_hits": + if err := dec.Decode(&s.InnerHits); err != nil { + return err + } + + case "max_children": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxChildren = &value + case float64: + f := int(v) + s.MaxChildren = &f + } + + case "min_children": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinChildren = &value + case float64: + f := int(v) + s.MinChildren = &f + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "score_mode": + if err := dec.Decode(&s.ScoreMode); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewHasChildQuery returns a HasChildQuery. func NewHasChildQuery() *HasChildQuery { r := &HasChildQuery{} diff --git a/typedapi/types/hasparentquery.go b/typedapi/types/hasparentquery.go old mode 100755 new mode 100644 index 776c5a9f46..cabd5ff478 --- a/typedapi/types/hasparentquery.go +++ b/typedapi/types/hasparentquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HasParentQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/joining.ts#L53-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/joining.ts#L53-L61 type HasParentQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` @@ -33,6 +43,93 @@ type HasParentQuery struct { Score *bool `json:"score,omitempty"` } +func (s *HasParentQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "inner_hits": + if err := dec.Decode(&s.InnerHits); err != nil { + return err + } + + case "parent_type": + if err := dec.Decode(&s.ParentType); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Score = &value + case bool: + s.Score = &v + } + + } + } + return nil +} + // NewHasParentQuery returns a HasParentQuery. func NewHasParentQuery() *HasParentQuery { r := &HasParentQuery{} diff --git a/typedapi/types/hasprivilegesuserprofileerrors.go b/typedapi/types/hasprivilegesuserprofileerrors.go old mode 100755 new mode 100644 index f90fd39124..639fc95183 --- a/typedapi/types/hasprivilegesuserprofileerrors.go +++ b/typedapi/types/hasprivilegesuserprofileerrors.go @@ -16,18 +16,71 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HasPrivilegesUserProfileErrors type. 
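The HasChildQuery and HasParentQuery decoders above follow the same token-loop style as the rest of the generated file: walk the object with json.Decoder.Token, switch on the key, and coerce numeric options such as max_children and min_children from either a number or a string. A much reduced stand-in showing that loop end to end (miniHasChild is illustrative only, not the real types.HasChildQuery):

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// miniHasChild keeps only two of the fields of the generated type.
type miniHasChild struct {
	Type        string
	MaxChildren *int
}

func (s *miniHasChild) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "type":
			if err := dec.Decode(&s.Type); err != nil {
				return err
			}
		case "max_children":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case float64: // JSON numbers arrive as float64
				n := int(v)
				s.MaxChildren = &n
			case string: // string-encoded numbers are parsed
				n, err := strconv.Atoi(v)
				if err != nil {
					return err
				}
				s.MaxChildren = &n
			}
		}
	}
	return nil
}

func main() {
	var q miniHasChild
	if err := json.Unmarshal([]byte(`{"type":"child","max_children":"3"}`), &q); err != nil {
		panic(err)
	}
	fmt.Println(q.Type, *q.MaxChildren) // child 3
}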
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges_user_profile/types.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges_user_profile/types.ts#L39-L42 type HasPrivilegesUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` } +func (s *HasPrivilegesUserProfileErrors) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "details": + if s.Details == nil { + s.Details = make(map[string]ErrorCause, 0) + } + if err := dec.Decode(&s.Details); err != nil { + return err + } + + } + } + return nil +} + // NewHasPrivilegesUserProfileErrors returns a HasPrivilegesUserProfileErrors. func NewHasPrivilegesUserProfileErrors() *HasPrivilegesUserProfileErrors { r := &HasPrivilegesUserProfileErrors{ diff --git a/typedapi/types/hdrmethod.go b/typedapi/types/hdrmethod.go old mode 100755 new mode 100644 index 339bcd5025..8c4f87de16 --- a/typedapi/types/hdrmethod.go +++ b/typedapi/types/hdrmethod.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HdrMethod type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L119-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L119-L121 type HdrMethod struct { NumberOfSignificantValueDigits *int `json:"number_of_significant_value_digits,omitempty"` } +func (s *HdrMethod) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "number_of_significant_value_digits": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfSignificantValueDigits = &value + case float64: + f := int(v) + s.NumberOfSignificantValueDigits = &f + } + + } + } + return nil +} + // NewHdrMethod returns a HdrMethod. func NewHdrMethod() *HdrMethod { r := &HdrMethod{} diff --git a/typedapi/types/hdrpercentileranksaggregate.go b/typedapi/types/hdrpercentileranksaggregate.go old mode 100755 new mode 100644 index 6caa3e16d3..c9d4d2a435 --- a/typedapi/types/hdrpercentileranksaggregate.go +++ b/typedapi/types/hdrpercentileranksaggregate.go @@ -16,20 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // HdrPercentileRanksAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L168-L169 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L169-L170 type HdrPercentileRanksAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Values Percentiles `json:"values"` + Meta Metadata `json:"meta,omitempty"` + Values Percentiles `json:"values"` +} + +func (s *HdrPercentileRanksAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "values": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(KeyedPercentiles, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + case '[': + o := []ArrayPercentilesItem{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + } + + } + } + return nil } // NewHdrPercentileRanksAggregate returns a HdrPercentileRanksAggregate. diff --git a/typedapi/types/hdrpercentilesaggregate.go b/typedapi/types/hdrpercentilesaggregate.go old mode 100755 new mode 100644 index 0cc89d1099..aeef683f5f --- a/typedapi/types/hdrpercentilesaggregate.go +++ b/typedapi/types/hdrpercentilesaggregate.go @@ -16,20 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // HdrPercentilesAggregate type. 
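Two things change in the HdrPercentileRanksAggregate hunk above (and in the near-identical HdrPercentilesAggregate hunk that continues below): the Meta field moves from map[string]json.RawMessage to the shared Metadata type, and the values payload is now decoded by inspecting its first byte, since percentile results can come back either keyed ({"95.0": 12.5}) or as an array of key/value items. A simplified sketch of that branch, with stand-in types rather than the real types.KeyedPercentiles / types.ArrayPercentilesItem:

package main

import (
	"encoding/json"
	"fmt"
)

// percentileItem stands in for types.ArrayPercentilesItem.
type percentileItem struct {
	Key   float64 `json:"key"`
	Value float64 `json:"value"`
}

// decodeValues mirrors the first-byte dispatch of the generated decoder.
func decodeValues(raw json.RawMessage) (interface{}, error) {
	switch raw[0] {
	case '{': // keyed form
		keyed := map[string]float64{}
		if err := json.Unmarshal(raw, &keyed); err != nil {
			return nil, err
		}
		return keyed, nil
	case '[': // array form
		var items []percentileItem
		if err := json.Unmarshal(raw, &items); err != nil {
			return nil, err
		}
		return items, nil
	}
	return nil, fmt.Errorf("values: unexpected leading byte %q", raw[0])
}

func main() {
	a, _ := decodeValues(json.RawMessage(`{"95.0": 12.5}`))
	b, _ := decodeValues(json.RawMessage(`[{"key": 95.0, "value": 12.5}]`))
	fmt.Println(a, b)
}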
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L165-L166 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L166-L167 type HdrPercentilesAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Values Percentiles `json:"values"` + Meta Metadata `json:"meta,omitempty"` + Values Percentiles `json:"values"` +} + +func (s *HdrPercentilesAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "values": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(KeyedPercentiles, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + case '[': + o := []ArrayPercentilesItem{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + } + + } + } + return nil } // NewHdrPercentilesAggregate returns a HdrPercentilesAggregate. diff --git a/typedapi/types/healthrecord.go b/typedapi/types/healthrecord.go old mode 100755 new mode 100644 index 7975295fd6..65e3cd0404 --- a/typedapi/types/healthrecord.go +++ b/typedapi/types/healthrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HealthRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/health/types.ts#L23-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/health/types.ts#L23-L94 type HealthRecord struct { // ActiveShardsPercent active number of shards in percent ActiveShardsPercent *string `json:"active_shards_percent,omitempty"` @@ -54,6 +62,132 @@ type HealthRecord struct { Unassign *string `json:"unassign,omitempty"` } +func (s *HealthRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active_shards_percent", "asp", "activeShardsPercent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ActiveShardsPercent = &o + + case "cluster", "cl": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Cluster = &o + + case "epoch", "time": + if err := dec.Decode(&s.Epoch); err != nil { + return err + } + + case "init", "i", "shards.initializing", "shardsInitializing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Init = &o + + case "max_task_wait_time", "mtwt", "maxTaskWaitTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxTaskWaitTime = &o + + case "node.data", "nd", "nodeData": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeData = &o + + case "node.total", "nt", "nodeTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeTotal = &o + + case "pending_tasks", "pt", "pendingTasks": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PendingTasks = &o + + case "pri", "p", "shards.primary", "shardsPrimary": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pri = &o + + case "relo", "r", "shards.relocating", "shardsRelocating": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Relo = &o + + case "shards", "t", "sh", "shards.total", "shardsTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Shards = &o + + case "status", "st": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Status = &o + + case "timestamp", "ts", "hms", "hhmmss": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "unassign", "u", "shards.unassigned", "shardsUnassigned": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Unassign = &o + + } + } + return nil +} + // NewHealthRecord returns a HealthRecord. func NewHealthRecord() *HealthRecord { r := &HealthRecord{} diff --git a/typedapi/types/healthresponsebody.go b/typedapi/types/healthresponsebody.go new file mode 100644 index 0000000000..1ddd7fa384 --- /dev/null +++ b/typedapi/types/healthresponsebody.go @@ -0,0 +1,310 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// HealthResponseBody type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/health/ClusterHealthResponse.ts#L39-L72 +type HealthResponseBody struct { + // ActivePrimaryShards The number of active primary shards. + ActivePrimaryShards int `json:"active_primary_shards"` + // ActiveShards The total number of active primary and replica shards. + ActiveShards int `json:"active_shards"` + // ActiveShardsPercentAsNumber The ratio of active shards in the cluster expressed as a percentage. + ActiveShardsPercentAsNumber Percentage `json:"active_shards_percent_as_number"` + // ClusterName The name of the cluster. + ClusterName string `json:"cluster_name"` + // DelayedUnassignedShards The number of shards whose allocation has been delayed by the timeout + // settings. + DelayedUnassignedShards int `json:"delayed_unassigned_shards"` + Indices map[string]IndexHealthStats `json:"indices,omitempty"` + // InitializingShards The number of shards that are under initialization. + InitializingShards int `json:"initializing_shards"` + // NumberOfDataNodes The number of nodes that are dedicated data nodes. + NumberOfDataNodes int `json:"number_of_data_nodes"` + // NumberOfInFlightFetch The number of unfinished fetches. + NumberOfInFlightFetch int `json:"number_of_in_flight_fetch"` + // NumberOfNodes The number of nodes within the cluster. + NumberOfNodes int `json:"number_of_nodes"` + // NumberOfPendingTasks The number of cluster-level changes that have not yet been executed. + NumberOfPendingTasks int `json:"number_of_pending_tasks"` + // RelocatingShards The number of shards that are under relocation. + RelocatingShards int `json:"relocating_shards"` + Status healthstatus.HealthStatus `json:"status"` + // TaskMaxWaitingInQueue The time since the earliest initiated task is waiting for being performed. + TaskMaxWaitingInQueue Duration `json:"task_max_waiting_in_queue,omitempty"` + // TaskMaxWaitingInQueueMillis The time expressed in milliseconds since the earliest initiated task is + // waiting for being performed. + TaskMaxWaitingInQueueMillis int64 `json:"task_max_waiting_in_queue_millis"` + // TimedOut If false the response returned within the period of time that is specified by + // the timeout parameter (30s by default) + TimedOut bool `json:"timed_out"` + // UnassignedShards The number of shards that are not allocated. 
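The HealthRecord decoder added above accepts every column alias the cat health API can emit (for example status/st, node.total/nt/nodeTotal), so records parse the same regardless of which header style was requested. The generated code does this with multi-valued case labels in its token loop; the stand-in below shows the same idea with a plain map for brevity (miniHealthRecord is illustrative only):

package main

import (
	"encoding/json"
	"fmt"
)

// miniHealthRecord keeps a single column from the cat health output.
type miniHealthRecord struct {
	Status *string
}

func (s *miniHealthRecord) UnmarshalJSON(data []byte) error {
	raw := map[string]json.RawMessage{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	// Accept either the long or the short column name, as the generated
	// decoder does with its multi-valued case labels.
	for _, key := range []string{"status", "st"} {
		if v, ok := raw[key]; ok {
			var st string
			if err := json.Unmarshal(v, &st); err != nil {
				return err
			}
			s.Status = &st
		}
	}
	return nil
}

func main() {
	var rec miniHealthRecord
	if err := json.Unmarshal([]byte(`{"st":"green"}`), &rec); err != nil {
		panic(err)
	}
	fmt.Println(*rec.Status) // green
}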
+ UnassignedShards int `json:"unassigned_shards"` +} + +func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active_primary_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ActivePrimaryShards = value + case float64: + f := int(v) + s.ActivePrimaryShards = f + } + + case "active_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ActiveShards = value + case float64: + f := int(v) + s.ActiveShards = f + } + + case "active_shards_percent_as_number": + if err := dec.Decode(&s.ActiveShardsPercentAsNumber); err != nil { + return err + } + + case "cluster_name": + if err := dec.Decode(&s.ClusterName); err != nil { + return err + } + + case "delayed_unassigned_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DelayedUnassignedShards = value + case float64: + f := int(v) + s.DelayedUnassignedShards = f + } + + case "indices": + if s.Indices == nil { + s.Indices = make(map[string]IndexHealthStats, 0) + } + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "initializing_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InitializingShards = value + case float64: + f := int(v) + s.InitializingShards = f + } + + case "number_of_data_nodes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfDataNodes = value + case float64: + f := int(v) + s.NumberOfDataNodes = f + } + + case "number_of_in_flight_fetch": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfInFlightFetch = value + case float64: + f := int(v) + s.NumberOfInFlightFetch = f + } + + case "number_of_nodes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfNodes = value + case float64: + f := int(v) + s.NumberOfNodes = f + } + + case "number_of_pending_tasks": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfPendingTasks = value + case float64: + f := int(v) + s.NumberOfPendingTasks = f + } + + case "relocating_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelocatingShards = value + case float64: + f := int(v) + s.RelocatingShards = f + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "task_max_waiting_in_queue": + if err := dec.Decode(&s.TaskMaxWaitingInQueue); err != nil { + return err + } + + case "task_max_waiting_in_queue_millis": + if err := dec.Decode(&s.TaskMaxWaitingInQueueMillis); err != nil { + return err + } + + case "timed_out": + var tmp interface{} + 
dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v + } + + case "unassigned_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.UnassignedShards = value + case float64: + f := int(v) + s.UnassignedShards = f + } + + } + } + return nil +} + +// NewHealthResponseBody returns a HealthResponseBody. +func NewHealthResponseBody() *HealthResponseBody { + r := &HealthResponseBody{ + Indices: make(map[string]IndexHealthStats, 0), + } + + return r +} diff --git a/typedapi/types/healthstatistics.go b/typedapi/types/healthstatistics.go old mode 100755 new mode 100644 index 3a5316ea5b..333419c160 --- a/typedapi/types/healthstatistics.go +++ b/typedapi/types/healthstatistics.go @@ -16,19 +16,82 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HealthStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L153-L155 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L153-L155 type HealthStatistics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` Invocations Invocations `json:"invocations"` } +func (s *HealthStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "invocations": + if err := dec.Decode(&s.Invocations); err != nil { + return err + } + + } + } + return nil +} + // NewHealthStatistics returns a HealthStatistics. func NewHealthStatistics() *HealthStatistics { r := &HealthStatistics{} diff --git a/typedapi/types/helprecord.go b/typedapi/types/helprecord.go old mode 100755 new mode 100644 index 7fb57961dc..6b1d5f26b6 --- a/typedapi/types/helprecord.go +++ b/typedapi/types/helprecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // HelpRecord type. 
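healthresponsebody.go is a new file rather than a regenerated one: it gives the cluster health response a typed body whose counters tolerate string-encoded numbers. A small usage sketch, assuming the go-elasticsearch v8 module containing this generated type is on the import path (the payload is a hand-written fragment, not real cluster output):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A trimmed-down cluster health payload; note number_of_nodes arriving
	// as a string, which the generated UnmarshalJSON still coerces to int.
	payload := []byte(`{
		"cluster_name": "my-cluster",
		"status": "yellow",
		"timed_out": false,
		"number_of_nodes": "3",
		"active_shards": 10
	}`)

	body := types.NewHealthResponseBody()
	if err := json.Unmarshal(payload, body); err != nil {
		panic(err)
	}
	fmt.Println(body.ClusterName, body.Status, body.NumberOfNodes, body.ActiveShards)
}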
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/help/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/help/types.ts#L20-L22 type HelpRecord struct { Endpoint string `json:"endpoint"` } diff --git a/typedapi/types/highlight.go b/typedapi/types/highlight.go old mode 100755 new mode 100644 index 8eef0b5c60..d2e541364a --- a/typedapi/types/highlight.go +++ b/typedapi/types/highlight.go @@ -16,24 +16,30 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/boundaryscanner" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/highlighterencoder" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/highlighterfragmenter" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/highlighterorder" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/highlightertagsschema" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/highlightertype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // Highlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L57-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L57-L60 type Highlight struct { BoundaryChars *string `json:"boundary_chars,omitempty"` BoundaryMaxScan *int `json:"boundary_max_scan,omitempty"` @@ -60,6 +66,257 @@ type Highlight struct { Type *highlightertype.HighlighterType `json:"type,omitempty"` } +func (s *Highlight) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boundary_chars": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BoundaryChars = &o + + case "boundary_max_scan": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.BoundaryMaxScan = &value + case float64: + f := int(v) + s.BoundaryMaxScan = &f + } + + case "boundary_scanner": + if err := dec.Decode(&s.BoundaryScanner); err != nil { + return err + } + + case "boundary_scanner_locale": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BoundaryScannerLocale = &o + + case "encoder": + if err := dec.Decode(&s.Encoder); err != nil { + return err + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]HighlightField, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "force_source": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ForceSource = &value + case bool: + s.ForceSource = &v + } + + case "fragment_size": + + var tmp interface{} + dec.Decode(&tmp) 
+ switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FragmentSize = &value + case float64: + f := int(v) + s.FragmentSize = &f + } + + case "fragmenter": + if err := dec.Decode(&s.Fragmenter); err != nil { + return err + } + + case "highlight_filter": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HighlightFilter = &value + case bool: + s.HighlightFilter = &v + } + + case "highlight_query": + if err := dec.Decode(&s.HighlightQuery); err != nil { + return err + } + + case "max_analyzed_offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAnalyzedOffset = &value + case float64: + f := int(v) + s.MaxAnalyzedOffset = &f + } + + case "max_fragment_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxFragmentLength = &value + case float64: + f := int(v) + s.MaxFragmentLength = &f + } + + case "no_match_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NoMatchSize = &value + case float64: + f := int(v) + s.NoMatchSize = &f + } + + case "number_of_fragments": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfFragments = &value + case float64: + f := int(v) + s.NumberOfFragments = &f + } + + case "options": + if s.Options == nil { + s.Options = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Options); err != nil { + return err + } + + case "order": + if err := dec.Decode(&s.Order); err != nil { + return err + } + + case "phrase_limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PhraseLimit = &value + case float64: + f := int(v) + s.PhraseLimit = &f + } + + case "post_tags": + if err := dec.Decode(&s.PostTags); err != nil { + return err + } + + case "pre_tags": + if err := dec.Decode(&s.PreTags); err != nil { + return err + } + + case "require_field_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.RequireFieldMatch = &value + case bool: + s.RequireFieldMatch = &v + } + + case "tags_schema": + if err := dec.Decode(&s.TagsSchema); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewHighlight returns a Highlight. func NewHighlight() *Highlight { r := &Highlight{ diff --git a/typedapi/types/highlightfield.go b/typedapi/types/highlightfield.go old mode 100755 new mode 100644 index ad9a428f45..df4ffbe427 --- a/typedapi/types/highlightfield.go +++ b/typedapi/types/highlightfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
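The Highlight hunk above gives the search highlighting options the same treatment: map-valued settings (fields, options) are allocated before decoding and numeric settings such as fragment_size accept string input. A short usage sketch against the generated type, assuming the module version that contains this change:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// fragment_size is deliberately given as a string here; the generated
	// decoder parses it with strconv.Atoi.
	payload := []byte(`{
		"fragment_size": "150",
		"number_of_fragments": 3,
		"fields": {"title": {}}
	}`)

	h := types.NewHighlight()
	if err := json.Unmarshal(payload, h); err != nil {
		panic(err)
	}
	fmt.Println(*h.FragmentSize, *h.NumberOfFragments, len(h.Fields)) // 150 3 1
}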
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -31,12 +31,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // HighlightField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/highlighting.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/highlighting.ts#L88-L92 type HighlightField struct { Analyzer Analyzer `json:"analyzer,omitempty"` BoundaryChars *string `json:"boundary_chars,omitempty"` @@ -65,6 +67,7 @@ type HighlightField struct { } func (s *HighlightField) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -92,102 +95,116 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "custom": o := NewCustomAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "fingerprint": o := NewFingerprintAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "keyword": o := NewKeywordAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "language": o := NewLanguageAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "nori": o := NewNoriAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "pattern": o := NewPatternAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "simple": o := NewSimpleAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "standard": o := NewStandardAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "stop": o := NewStopAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "whitespace": o := NewWhitespaceAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "icu_analyzer": o := NewIcuAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "kuromoji": o := NewKuromojiAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "snowball": o := NewSnowballAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o case "dutch": o := NewDutchAnalyzer() - if err := localDec.Decode(o); err != nil { + if err := localDec.Decode(&o); err != nil { return err } s.Analyzer = *o default: - if err := dec.Decode(&s.Analyzer); err != nil { + if err := localDec.Decode(&s.Analyzer); err != nil { return err } } case "boundary_chars": - if err := dec.Decode(&s.BoundaryChars); err != nil { + var tmp json.RawMessage + if err := 
dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.BoundaryChars = &o case "boundary_max_scan": - if err := dec.Decode(&s.BoundaryMaxScan); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.BoundaryMaxScan = &value + case float64: + f := int(v) + s.BoundaryMaxScan = &f } case "boundary_scanner": @@ -196,23 +213,57 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "boundary_scanner_locale": - if err := dec.Decode(&s.BoundaryScannerLocale); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.BoundaryScannerLocale = &o case "force_source": - if err := dec.Decode(&s.ForceSource); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ForceSource = &value + case bool: + s.ForceSource = &v } case "fragment_offset": - if err := dec.Decode(&s.FragmentOffset); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FragmentOffset = &value + case float64: + f := int(v) + s.FragmentOffset = &f } case "fragment_size": - if err := dec.Decode(&s.FragmentSize); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FragmentSize = &value + case float64: + f := int(v) + s.FragmentSize = &f } case "fragmenter": @@ -221,8 +272,17 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "highlight_filter": - if err := dec.Decode(&s.HighlightFilter); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HighlightFilter = &value + case bool: + s.HighlightFilter = &v } case "highlight_query": @@ -231,31 +291,89 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "matched_fields": - if err := dec.Decode(&s.MatchedFields); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.MatchedFields = append(s.MatchedFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MatchedFields); err != nil { + return err + } } case "max_analyzed_offset": - if err := dec.Decode(&s.MaxAnalyzedOffset); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAnalyzedOffset = &value + case float64: + f := int(v) + s.MaxAnalyzedOffset = &f } case "max_fragment_length": - if err := dec.Decode(&s.MaxFragmentLength); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxFragmentLength = &value + case float64: + f := int(v) + s.MaxFragmentLength = &f } case "no_match_size": - if err := dec.Decode(&s.NoMatchSize); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { 
+ case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NoMatchSize = &value + case float64: + f := int(v) + s.NoMatchSize = &f } case "number_of_fragments": - if err := dec.Decode(&s.NumberOfFragments); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfFragments = &value + case float64: + f := int(v) + s.NumberOfFragments = &f } case "options": + if s.Options == nil { + s.Options = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Options); err != nil { return err } @@ -266,8 +384,19 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "phrase_limit": - if err := dec.Decode(&s.PhraseLimit); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PhraseLimit = &value + case float64: + f := int(v) + s.PhraseLimit = &f } case "post_tags": @@ -281,8 +410,17 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "require_field_match": - if err := dec.Decode(&s.RequireFieldMatch); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.RequireFieldMatch = &value + case bool: + s.RequireFieldMatch = &v } case "tags_schema": diff --git a/typedapi/types/hint.go b/typedapi/types/hint.go old mode 100755 new mode 100644 index e3bf36d059..64d16f9aa6 --- a/typedapi/types/hint.go +++ b/typedapi/types/hint.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Hint type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/suggest_user_profiles/types.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/suggest_user_profiles/types.ts#L23-L34 type Hint struct { // Labels A single key-value pair to match against the labels section // of a profile. 
A profile is considered matching if it matches @@ -32,6 +40,56 @@ type Hint struct { Uids []string `json:"uids,omitempty"` } +func (s *Hint) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "labels": + if s.Labels == nil { + s.Labels = make(map[string][]string, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := new(string) + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Labels[key] = append(s.Labels[key], *o) + default: + o := []string{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Labels[key] = o + } + } + + case "uids": + if err := dec.Decode(&s.Uids); err != nil { + return err + } + + } + } + return nil +} + // NewHint returns a Hint. func NewHint() *Hint { r := &Hint{ diff --git a/typedapi/types/histogramaggregate.go b/typedapi/types/histogramaggregate.go old mode 100755 new mode 100644 index 0b60fa0af4..e39cda91d6 --- a/typedapi/types/histogramaggregate.go +++ b/typedapi/types/histogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // HistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L339-L340 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L340-L341 type HistogramAggregate struct { - Buckets BucketsHistogramBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsHistogramBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *HistogramAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *HistogramAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]HistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []HistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/histogramaggregation.go b/typedapi/types/histogramaggregation.go old mode 100755 new mode 100644 index 9c7ba935fd..b936b7206c --- a/typedapi/types/histogramaggregation.go +++ b/typedapi/types/histogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
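The Hint decoder added above normalises the labels hint for the suggest user profiles API: each label may be given as one string or as a list of strings, and both end up in a []string. A short usage sketch against the generated type (assuming the module version that contains this change):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "direction" is a single value, "env" a list; both decode to slices.
	payload := []byte(`{
		"labels": {"direction": "north", "env": ["prod", "staging"]},
		"uids": ["u_1", "u_2"]
	}`)

	h := types.NewHint()
	if err := json.Unmarshal(payload, h); err != nil {
		panic(err)
	}
	fmt.Println(h.Labels["direction"], h.Labels["env"], h.Uids)
	// [north] [prod staging] [u_1 u_2]
}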
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,29 +27,32 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // HistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L235-L247 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L235-L247 type HistogramAggregation struct { - ExtendedBounds *ExtendedBoundsdouble `json:"extended_bounds,omitempty"` - Field *string `json:"field,omitempty"` - Format *string `json:"format,omitempty"` - HardBounds *ExtendedBoundsdouble `json:"hard_bounds,omitempty"` - Interval *Float64 `json:"interval,omitempty"` - Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - MinDocCount *int `json:"min_doc_count,omitempty"` - Missing *Float64 `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` - Offset *Float64 `json:"offset,omitempty"` - Order AggregateOrder `json:"order,omitempty"` - Script Script `json:"script,omitempty"` + ExtendedBounds *ExtendedBoundsdouble `json:"extended_bounds,omitempty"` + Field *string `json:"field,omitempty"` + Format *string `json:"format,omitempty"` + HardBounds *ExtendedBoundsdouble `json:"hard_bounds,omitempty"` + Interval *Float64 `json:"interval,omitempty"` + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` + MinDocCount *int `json:"min_doc_count,omitempty"` + Missing *Float64 `json:"missing,omitempty"` + Name *string `json:"name,omitempty"` + Offset *Float64 `json:"offset,omitempty"` + Order AggregateOrder `json:"order,omitempty"` + Script Script `json:"script,omitempty"` } func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -74,9 +77,12 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "hard_bounds": if err := dec.Decode(&s.HardBounds); err != nil { @@ -84,13 +90,33 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { } case "interval": - if err := dec.Decode(&s.Interval); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Interval = &f + case float64: + f := Float64(v) + s.Interval = &f } case "keyed": - if err := dec.Decode(&s.Keyed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v } case "meta": @@ -99,23 +125,59 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { } case "min_doc_count": - if err := dec.Decode(&s.MinDocCount); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int(v) + s.MinDocCount = &f } case "missing": - if err := 
dec.Decode(&s.Missing); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Missing = &f + case float64: + f := Float64(v) + s.Missing = &f } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "offset": - if err := dec.Decode(&s.Offset); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Offset = &f + case float64: + f := Float64(v) + s.Offset = &f } case "order": @@ -125,15 +187,17 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o - case '[': o := make([]map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o } diff --git a/typedapi/types/histogrambucket.go b/typedapi/types/histogrambucket.go old mode 100755 new mode 100644 index c4710d7391..0528cc3a3c --- a/typedapi/types/histogrambucket.go +++ b/typedapi/types/histogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // HistogramBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L342-L345 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L343-L346 type HistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -43,6 +45,7 @@ type HistogramBucket struct { } func (s *HistogramBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -56,462 +59,557 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - 
return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); 
err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Key = f + case float64: + f := Float64(v) + s.Key = f } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := 
NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -537,6 +635,7 @@ func (s HistogramBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/histogramgrouping.go b/typedapi/types/histogramgrouping.go old mode 100755 new mode 100644 index 18666a6d5b..8ca286a3ef --- a/typedapi/types/histogramgrouping.go +++ b/typedapi/types/histogramgrouping.go @@ -16,18 +16,79 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HistogramGrouping type. 
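As a usage sketch for the rewritten HistogramBucket.UnmarshalJSON in the histogrambucket.go hunk above (this snippet is not part of the generated diff; the payload, the "avg#avg_price" key, and the printed values are illustrative assumptions): keys of the form "type#name" are routed to the matching concrete Aggregate and stored under "name", and doc_count is accepted either as a JSON number or as a quoted string.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "avg#avg_price" uses the typed-key format: the part before '#' selects
	// the concrete aggregate type, the part after it is the map key the
	// decoded result is stored under in Aggregations.
	payload := []byte(`{"key":1.0,"doc_count":"10","avg#avg_price":{"value":12.5}}`)

	var bucket types.HistogramBucket
	if err := json.Unmarshal(payload, &bucket); err != nil {
		panic(err)
	}

	// doc_count arrived as a quoted string but is parsed into an int64.
	fmt.Println(bucket.DocCount) // 10
	// The sub-aggregation is decoded into its concrete type, not a raw map.
	fmt.Printf("%T\n", bucket.Aggregations["avg_price"]) // *types.AvgAggregate
}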
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/_types/Groupings.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/_types/Groupings.ts#L44-L47 type HistogramGrouping struct { Fields []string `json:"fields"` Interval int64 `json:"interval"` } +func (s *HistogramGrouping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + case "interval": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Interval = value + case float64: + f := int64(v) + s.Interval = f + } + + } + } + return nil +} + // NewHistogramGrouping returns a HistogramGrouping. func NewHistogramGrouping() *HistogramGrouping { r := &HistogramGrouping{} diff --git a/typedapi/types/histogramproperty.go b/typedapi/types/histogramproperty.go old mode 100755 new mode 100644 index f5a0f3e175..d53f6ee38c --- a/typedapi/types/histogramproperty.go +++ b/typedapi/types/histogramproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // HistogramProperty type. 
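Along the same lines, a minimal sketch of the new HistogramGrouping.UnmarshalJSON defined just above (again not part of the generated code; the field name "price" and the interval 3600 are made-up example values): "fields" is accepted either as a single string or as an array, and "interval" either as a number or as a quoted string.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var grouping types.HistogramGrouping
	// A bare string for "fields" and a quoted number for "interval" both decode.
	if err := json.Unmarshal([]byte(`{"fields":"price","interval":"3600"}`), &grouping); err != nil {
		panic(err)
	}
	fmt.Println(grouping.Fields, grouping.Interval) // [price] 3600
}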
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L54-L57 type HistogramProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -45,6 +47,7 @@ type HistogramProperty struct { } func (s *HistogramProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,6 +67,9 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -351,28 +357,54 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -660,7 +692,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/hit.go b/typedapi/types/hit.go old mode 100755 new mode 100644 index 22e3c3e656..cb5912df76 --- a/typedapi/types/hit.go +++ b/typedapi/types/hit.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // Hit type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L40-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L40-L64 type Hit struct { Explanation_ *Explanation `json:"_explanation,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` @@ -49,6 +55,152 @@ type Hit struct { Version_ *int64 `json:"_version,omitempty"` } +func (s *Hit) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_explanation": + if err := dec.Decode(&s.Explanation_); err != nil { + return err + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "highlight": + if s.Highlight == nil { + s.Highlight = make(map[string][]string, 0) + } + if err := dec.Decode(&s.Highlight); err != nil { + return err + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "ignored_field_values": + if s.IgnoredFieldValues == nil { + s.IgnoredFieldValues = make(map[string][]string, 0) + } + if err := dec.Decode(&s.IgnoredFieldValues); err != nil { + return err + } + + case "_ignored": + if err := dec.Decode(&s.Ignored_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "inner_hits": + if s.InnerHits == nil { + s.InnerHits = make(map[string]InnerHitsResult, 0) + } + if err := dec.Decode(&s.InnerHits); err != nil { + return err + } + + case "matched_queries": + if err := dec.Decode(&s.MatchedQueries); err != nil { + return err + } + + case "_nested": + if err := dec.Decode(&s.Nested_); err != nil { + return err + } + + case "_node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node_ = &o + + case "_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryTerm_ = &value + case float64: + f := int64(v) + s.PrimaryTerm_ = &f + } + + case "_routing": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Routing_ = &o + + case "_score": + if err := dec.Decode(&s.Score_); err != nil { + return err + } + + case "_seq_no": + if err := dec.Decode(&s.SeqNo_); err != nil { + return err + } + + case "_shard": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Shard_ = &o + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + case "_version": + if err := dec.Decode(&s.Version_); err != nil { + return err + } + + } + } + return nil +} + // NewHit returns a Hit. func NewHit() *Hit { r := &Hit{ diff --git a/typedapi/types/hitsevent.go b/typedapi/types/hitsevent.go old mode 100755 new mode 100644 index eb0f13dacd..b81e771fe4 --- a/typedapi/types/hitsevent.go +++ b/typedapi/types/hitsevent.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // HitsEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/_types/EqlHits.ts#L41-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/_types/EqlHits.ts#L41-L49 type HitsEvent struct { Fields map[string][]json.RawMessage `json:"fields,omitempty"` // Id_ Unique identifier for the event. This ID is only unique within the index. @@ -37,6 +41,49 @@ type HitsEvent struct { Source_ json.RawMessage `json:"_source,omitempty"` } +func (s *HitsEvent) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string][]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + } + } + return nil +} + // NewHitsEvent returns a HitsEvent. func NewHitsEvent() *HitsEvent { r := &HitsEvent{ diff --git a/typedapi/types/hitsmetadata.go b/typedapi/types/hitsmetadata.go old mode 100755 new mode 100644 index 7c4f61496b..c6777a78dd --- a/typedapi/types/hitsmetadata.go +++ b/typedapi/types/hitsmetadata.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // HitsMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L66-L72 type HitsMetadata struct { Hits []Hit `json:"hits"` MaxScore Float64 `json:"max_score,omitempty"` @@ -31,6 +38,41 @@ type HitsMetadata struct { Total *TotalHits `json:"total,omitempty"` } +func (s *HitsMetadata) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hits": + if err := dec.Decode(&s.Hits); err != nil { + return err + } + + case "max_score": + if err := dec.Decode(&s.MaxScore); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + } + } + return nil +} + // NewHitsMetadata returns a HitsMetadata. 
func NewHitsMetadata() *HitsMetadata { r := &HitsMetadata{} diff --git a/typedapi/types/hitssequence.go b/typedapi/types/hitssequence.go old mode 100755 new mode 100644 index cf7bef31e2..15cd83c78c --- a/typedapi/types/hitssequence.go +++ b/typedapi/types/hitssequence.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // HitsSequence type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/eql/_types/EqlHits.ts#L51-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/eql/_types/EqlHits.ts#L51-L59 type HitsSequence struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events"` diff --git a/typedapi/types/holtlinearmodelsettings.go b/typedapi/types/holtlinearmodelsettings.go old mode 100755 new mode 100644 index 9400b44ee4..09ebe4b3c1 --- a/typedapi/types/holtlinearmodelsettings.go +++ b/typedapi/types/holtlinearmodelsettings.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HoltLinearModelSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L231-L234 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L231-L234 type HoltLinearModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` } +func (s *HoltLinearModelSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Alpha = &f + case float64: + f := float32(v) + s.Alpha = &f + } + + case "beta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Beta = &f + case float64: + f := float32(v) + s.Beta = &f + } + + } + } + return nil +} + // NewHoltLinearModelSettings returns a HoltLinearModelSettings. func NewHoltLinearModelSettings() *HoltLinearModelSettings { r := &HoltLinearModelSettings{} diff --git a/typedapi/types/holtmovingaverageaggregation.go b/typedapi/types/holtmovingaverageaggregation.go old mode 100755 new mode 100644 index b16966087f..ff860a4d57 --- a/typedapi/types/holtmovingaverageaggregation.go +++ b/typedapi/types/holtmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,27 +27,30 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // HoltMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L217-L220 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L217-L220 type HoltMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Minimize *bool `json:"minimize,omitempty"` - Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` - Predict *int `json:"predict,omitempty"` - Settings HoltLinearModelSettings `json:"settings"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Minimize *bool `json:"minimize,omitempty"` + Model string `json:"model,omitempty"` + Name *string `json:"name,omitempty"` + Predict *int `json:"predict,omitempty"` + Settings HoltLinearModelSettings `json:"settings"` + Window *int `json:"window,omitempty"` } func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,9 +70,12 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -82,8 +88,17 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "minimize": - if err := dec.Decode(&s.Minimize); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Minimize = &value + case bool: + s.Minimize = &v } case "model": @@ -92,13 +107,27 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "predict": - if err := dec.Decode(&s.Predict); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Predict = &value + case float64: + f := int(v) + s.Predict = &f } case "settings": @@ -107,8 +136,19 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if 
err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/holtwintersmodelsettings.go b/typedapi/types/holtwintersmodelsettings.go old mode 100755 new mode 100644 index f2f79840ee..8a6c81a5db --- a/typedapi/types/holtwintersmodelsettings.go +++ b/typedapi/types/holtwintersmodelsettings.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/holtwinterstype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // HoltWintersModelSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L235-L242 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L235-L242 type HoltWintersModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` @@ -36,6 +44,109 @@ type HoltWintersModelSettings struct { Type *holtwinterstype.HoltWintersType `json:"type,omitempty"` } +func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Alpha = &f + case float64: + f := float32(v) + s.Alpha = &f + } + + case "beta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Beta = &f + case float64: + f := float32(v) + s.Beta = &f + } + + case "gamma": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Gamma = &f + case float64: + f := float32(v) + s.Gamma = &f + } + + case "pad": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Pad = &value + case bool: + s.Pad = &v + } + + case "period": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Period = &value + case float64: + f := int(v) + s.Period = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewHoltWintersModelSettings returns a HoltWintersModelSettings. 
func NewHoltWintersModelSettings() *HoltWintersModelSettings { r := &HoltWintersModelSettings{} diff --git a/typedapi/types/holtwintersmovingaverageaggregation.go b/typedapi/types/holtwintersmovingaverageaggregation.go old mode 100755 new mode 100644 index 9097802b80..9968a4f339 --- a/typedapi/types/holtwintersmovingaverageaggregation.go +++ b/typedapi/types/holtwintersmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,27 +27,30 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // HoltWintersMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L222-L225 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L222-L225 type HoltWintersMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Minimize *bool `json:"minimize,omitempty"` - Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` - Predict *int `json:"predict,omitempty"` - Settings HoltWintersModelSettings `json:"settings"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Minimize *bool `json:"minimize,omitempty"` + Model string `json:"model,omitempty"` + Name *string `json:"name,omitempty"` + Predict *int `json:"predict,omitempty"` + Settings HoltWintersModelSettings `json:"settings"` + Window *int `json:"window,omitempty"` } func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,9 +70,12 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -82,8 +88,17 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "minimize": - if err := dec.Decode(&s.Minimize); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Minimize = &value + case bool: + s.Minimize = &v } case "model": @@ -92,13 +107,27 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "predict": - if err := dec.Decode(&s.Predict); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch 
v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Predict = &value + case float64: + f := int(v) + s.Predict = &f } case "settings": @@ -107,8 +136,19 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/hop.go b/typedapi/types/hop.go old mode 100755 new mode 100644 index 75785d4ef5..83ea405ee5 --- a/typedapi/types/hop.go +++ b/typedapi/types/hop.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Hop type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/Hop.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/Hop.ts#L23-L27 type Hop struct { Connections *Hop `json:"connections,omitempty"` Query Query `json:"query"` diff --git a/typedapi/types/hotthread.go b/typedapi/types/hotthread.go old mode 100755 new mode 100644 index 44bbd36999..039c9d2a4c --- a/typedapi/types/hotthread.go +++ b/typedapi/types/hotthread.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HotThread type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/hot_threads/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/hot_threads/types.ts#L23-L28 type HotThread struct { Hosts []string `json:"hosts"` NodeId string `json:"node_id"` @@ -30,6 +38,46 @@ type HotThread struct { Threads []string `json:"threads"` } +func (s *HotThread) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hosts": + if err := dec.Decode(&s.Hosts); err != nil { + return err + } + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "node_name": + if err := dec.Decode(&s.NodeName); err != nil { + return err + } + + case "threads": + if err := dec.Decode(&s.Threads); err != nil { + return err + } + + } + } + return nil +} + // NewHotThread returns a HotThread. func NewHotThread() *HotThread { r := &HotThread{} diff --git a/typedapi/types/hourandminute.go b/typedapi/types/hourandminute.go old mode 100755 new mode 100644 index 2e62e7bbd6..a6a97558a5 --- a/typedapi/types/hourandminute.go +++ b/typedapi/types/hourandminute.go @@ -16,18 +16,56 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HourAndMinute type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L110-L113 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L110-L113 type HourAndMinute struct { Hour []int `json:"hour"` Minute []int `json:"minute"` } +func (s *HourAndMinute) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hour": + if err := dec.Decode(&s.Hour); err != nil { + return err + } + + case "minute": + if err := dec.Decode(&s.Minute); err != nil { + return err + } + + } + } + return nil +} + // NewHourAndMinute returns a HourAndMinute. func NewHourAndMinute() *HourAndMinute { r := &HourAndMinute{} diff --git a/typedapi/types/hourlyschedule.go b/typedapi/types/hourlyschedule.go old mode 100755 new mode 100644 index 443c0bdf7b..db3d39048c --- a/typedapi/types/hourlyschedule.go +++ b/typedapi/types/hourlyschedule.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HourlySchedule type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L47-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L47-L49 type HourlySchedule struct { Minute []int `json:"minute"` } +func (s *HourlySchedule) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "minute": + if err := dec.Decode(&s.Minute); err != nil { + return err + } + + } + } + return nil +} + // NewHourlySchedule returns a HourlySchedule. func NewHourlySchedule() *HourlySchedule { r := &HourlySchedule{} diff --git a/typedapi/types/htmlstripcharfilter.go b/typedapi/types/htmlstripcharfilter.go old mode 100755 new mode 100644 index 3dd8948c97..2204b67dbe --- a/typedapi/types/htmlstripcharfilter.go +++ b/typedapi/types/htmlstripcharfilter.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HtmlStripCharFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/char_filters.ts#L43-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/char_filters.ts#L43-L45 type HtmlStripCharFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *HtmlStripCharFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewHtmlStripCharFilter returns a HtmlStripCharFilter. func NewHtmlStripCharFilter() *HtmlStripCharFilter { r := &HtmlStripCharFilter{} diff --git a/typedapi/types/http.go b/typedapi/types/http.go old mode 100755 new mode 100644 index f9eefbf300..10c50bf202 --- a/typedapi/types/http.go +++ b/typedapi/types/http.go @@ -16,19 +16,85 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Http type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L266-L270 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L266-L270 type Http struct { Clients []Client `json:"clients,omitempty"` CurrentOpen *int `json:"current_open,omitempty"` TotalOpened *int64 `json:"total_opened,omitempty"` } +func (s *Http) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "clients": + if err := dec.Decode(&s.Clients); err != nil { + return err + } + + case "current_open": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CurrentOpen = &value + case float64: + f := int(v) + s.CurrentOpen = &f + } + + case "total_opened": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalOpened = &value + case float64: + f := int64(v) + s.TotalOpened = &f + } + + } + } + return nil +} + // NewHttp returns a Http. func NewHttp() *Http { r := &Http{} diff --git a/typedapi/types/httpemailattachment.go b/typedapi/types/httpemailattachment.go old mode 100755 new mode 100644 index f8ec26c630..ffebd74deb --- a/typedapi/types/httpemailattachment.go +++ b/typedapi/types/httpemailattachment.go @@ -16,19 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HttpEmailAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L218-L222 type HttpEmailAttachment struct { ContentType *string `json:"content_type,omitempty"` Inline *bool `json:"inline,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` } +func (s *HttpEmailAttachment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "content_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ContentType = &o + + case "inline": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Inline = &value + case bool: + s.Inline = &v + } + + case "request": + if err := dec.Decode(&s.Request); err != nil { + return err + } + + } + } + return nil +} + // NewHttpEmailAttachment returns a HttpEmailAttachment. func NewHttpEmailAttachment() *HttpEmailAttachment { r := &HttpEmailAttachment{} diff --git a/typedapi/types/httpheaders.go b/typedapi/types/httpheaders.go old mode 100755 new mode 100644 index 2256954fe2..aeb04e144f --- a/typedapi/types/httpheaders.go +++ b/typedapi/types/httpheaders.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // HttpHeaders type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L138-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L138-L138 type HttpHeaders map[string][]string diff --git a/typedapi/types/httpinput.go b/typedapi/types/httpinput.go old mode 100755 new mode 100644 index a9d2133997..10b96c4b93 --- a/typedapi/types/httpinput.go +++ b/typedapi/types/httpinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // HttpInput type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L44-L48 type HttpInput struct { Extract []string `json:"extract,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` diff --git a/typedapi/types/httpinputauthentication.go b/typedapi/types/httpinputauthentication.go old mode 100755 new mode 100644 index 44d891f347..f2f3f88304 --- a/typedapi/types/httpinputauthentication.go +++ b/typedapi/types/httpinputauthentication.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // HttpInputAuthentication type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L50-L52 type HttpInputAuthentication struct { Basic HttpInputBasicAuthentication `json:"basic"` } diff --git a/typedapi/types/httpinputbasicauthentication.go b/typedapi/types/httpinputbasicauthentication.go old mode 100755 new mode 100644 index 8d6864c2a3..ef3b355d39 --- a/typedapi/types/httpinputbasicauthentication.go +++ b/typedapi/types/httpinputbasicauthentication.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HttpInputBasicAuthentication type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L54-L57 type HttpInputBasicAuthentication struct { Password string `json:"password"` Username string `json:"username"` } +func (s *HttpInputBasicAuthentication) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "password": + if err := dec.Decode(&s.Password); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil +} + // NewHttpInputBasicAuthentication returns a HttpInputBasicAuthentication. func NewHttpInputBasicAuthentication() *HttpInputBasicAuthentication { r := &HttpInputBasicAuthentication{} diff --git a/typedapi/types/httpinputproxy.go b/typedapi/types/httpinputproxy.go old mode 100755 new mode 100644 index da4ef7882c..7c630505af --- a/typedapi/types/httpinputproxy.go +++ b/typedapi/types/httpinputproxy.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // HttpInputProxy type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L67-L70 type HttpInputProxy struct { Host string `json:"host"` Port uint `json:"port"` } +func (s *HttpInputProxy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "port": + if err := dec.Decode(&s.Port); err != nil { + return err + } + + } + } + return nil +} + // NewHttpInputProxy returns a HttpInputProxy. func NewHttpInputProxy() *HttpInputProxy { r := &HttpInputProxy{} diff --git a/typedapi/types/httpinputrequestdefinition.go b/typedapi/types/httpinputrequestdefinition.go old mode 100755 new mode 100644 index 61e45d30b4..c6a5c1a4a4 --- a/typedapi/types/httpinputrequestdefinition.go +++ b/typedapi/types/httpinputrequestdefinition.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/connectionscheme" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/httpinputmethod" + + "bytes" + "errors" + "io" + + "encoding/json" ) // HttpInputRequestDefinition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L72-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L72-L86 type HttpInputRequestDefinition struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` @@ -44,6 +50,106 @@ type HttpInputRequestDefinition struct { Url *string `json:"url,omitempty"` } +func (s *HttpInputRequestDefinition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auth": + if err := dec.Decode(&s.Auth); err != nil { + return err + } + + case "body": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Body = &o + + case "connection_timeout": + if err := dec.Decode(&s.ConnectionTimeout); err != nil { + return err + } + + case "headers": + if s.Headers == nil { + s.Headers = make(map[string]string, 0) + } + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "method": + if err := dec.Decode(&s.Method); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]string, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = &o + + case "port": + if err := dec.Decode(&s.Port); err != nil { + return err + } + + case "proxy": + if err := dec.Decode(&s.Proxy); err != nil { + return err + } + + case "read_timeout": + if err := dec.Decode(&s.ReadTimeout); err != nil { + return err + } + + case "scheme": + if err := dec.Decode(&s.Scheme); err != nil { + return err + } + + case "url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Url = &o + + } + } + return nil +} + // NewHttpInputRequestDefinition returns a HttpInputRequestDefinition. func NewHttpInputRequestDefinition() *HttpInputRequestDefinition { r := &HttpInputRequestDefinition{ diff --git a/typedapi/types/httpinputrequestresult.go b/typedapi/types/httpinputrequestresult.go old mode 100755 new mode 100644 index d87dd179e5..3f61030381 --- a/typedapi/types/httpinputrequestresult.go +++ b/typedapi/types/httpinputrequestresult.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/connectionscheme" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/httpinputmethod" + + "bytes" + "errors" + "io" + + "encoding/json" ) // HttpInputRequestResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L300-L300 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L300-L300 type HttpInputRequestResult struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` @@ -44,6 +50,106 @@ type HttpInputRequestResult struct { Url *string `json:"url,omitempty"` } +func (s *HttpInputRequestResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auth": + if err := dec.Decode(&s.Auth); err != nil { + return err + } + + case "body": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Body = &o + + case "connection_timeout": + if err := dec.Decode(&s.ConnectionTimeout); err != nil { + return err + } + + case "headers": + if s.Headers == nil { + s.Headers = make(map[string]string, 0) + } + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "method": + if err := dec.Decode(&s.Method); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]string, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = &o + + case "port": + if err := dec.Decode(&s.Port); err != nil { + return err + } + + case "proxy": + if err := dec.Decode(&s.Proxy); err != nil { + return err + } + + case "read_timeout": + if err := dec.Decode(&s.ReadTimeout); err != nil { + return err + } + + case "scheme": + if err := dec.Decode(&s.Scheme); err != nil { + return err + } + + case "url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Url = &o + + } + } + return nil +} + // NewHttpInputRequestResult returns a HttpInputRequestResult. func NewHttpInputRequestResult() *HttpInputRequestResult { r := &HttpInputRequestResult{ diff --git a/typedapi/types/httpinputresponseresult.go b/typedapi/types/httpinputresponseresult.go old mode 100755 new mode 100644 index 7d75e835b9..ef58e161e5 --- a/typedapi/types/httpinputresponseresult.go +++ b/typedapi/types/httpinputresponseresult.go @@ -16,17 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HttpInputResponseResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L302-L306 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L302-L306 type HttpInputResponseResult struct { - Body string `json:"body"` - Headers map[string][]string `json:"headers"` - Status int `json:"status"` + Body string `json:"body"` + Headers HttpHeaders `json:"headers"` + Status int `json:"status"` +} + +func (s *HttpInputResponseResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "body": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Body = o + + case "headers": + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "status": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Status = value + case float64: + f := int(v) + s.Status = f + } + + } + } + return nil } // NewHttpInputResponseResult returns a HttpInputResponseResult. diff --git a/typedapi/types/hunspelltokenfilter.go b/typedapi/types/hunspelltokenfilter.go old mode 100755 new mode 100644 index fa81b51158..5bc74f2cdd --- a/typedapi/types/hunspelltokenfilter.go +++ b/typedapi/types/hunspelltokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HunspellTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L199-L205 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L200-L206 type HunspellTokenFilter struct { Dedup *bool `json:"dedup,omitempty"` Dictionary *string `json:"dictionary,omitempty"` @@ -32,6 +42,80 @@ type HunspellTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "dedup": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Dedup = &value + case bool: + s.Dedup = &v + } + + case "dictionary": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Dictionary = &o + + case "locale": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Locale = o + + case "longest_only": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.LongestOnly = &value + case bool: + s.LongestOnly = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewHunspellTokenFilter returns a HunspellTokenFilter. func NewHunspellTokenFilter() *HunspellTokenFilter { r := &HunspellTokenFilter{} diff --git a/typedapi/types/hyperparameter.go b/typedapi/types/hyperparameter.go old mode 100755 new mode 100644 index 19798f9a2a..d1cb984b96 --- a/typedapi/types/hyperparameter.go +++ b/typedapi/types/hyperparameter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Hyperparameter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L206-L220 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L208-L222 type Hyperparameter struct { // AbsoluteImportance A positive number showing how much the parameter influences the variation of // the loss function. 
For hyperparameters with values that are not specified by @@ -42,6 +52,93 @@ type Hyperparameter struct { Value Float64 `json:"value"` } +func (s *Hyperparameter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "absolute_importance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.AbsoluteImportance = &f + case float64: + f := Float64(v) + s.AbsoluteImportance = &f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "relative_importance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RelativeImportance = &f + case float64: + f := Float64(v) + s.RelativeImportance = &f + } + + case "supplied": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Supplied = value + case bool: + s.Supplied = v + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Value = f + case float64: + f := Float64(v) + s.Value = f + } + + } + } + return nil +} + // NewHyperparameter returns a Hyperparameter. func NewHyperparameter() *Hyperparameter { r := &Hyperparameter{} diff --git a/typedapi/types/hyperparameters.go b/typedapi/types/hyperparameters.go old mode 100755 new mode 100644 index b9c8d18a33..d7f5a8f0ae --- a/typedapi/types/hyperparameters.go +++ b/typedapi/types/hyperparameters.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Hyperparameters type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L395-L410 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L395-L410 type Hyperparameters struct { Alpha *Float64 `json:"alpha,omitempty"` DownsampleFactor *Float64 `json:"downsample_factor,omitempty"` @@ -40,6 +50,250 @@ type Hyperparameters struct { SoftTreeDepthTolerance *Float64 `json:"soft_tree_depth_tolerance,omitempty"` } +func (s *Hyperparameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Alpha = &f + case float64: + f := Float64(v) + s.Alpha = &f + } + + case "downsample_factor": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.DownsampleFactor = &f + case float64: + f := Float64(v) + s.DownsampleFactor = &f + } + + case "eta": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Eta = &f + case float64: + f := Float64(v) + s.Eta = &f + } + + case "eta_growth_rate_per_tree": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.EtaGrowthRatePerTree = &f + case float64: + f := Float64(v) + s.EtaGrowthRatePerTree = &f + } + + case "feature_bag_fraction": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.FeatureBagFraction = &f + case float64: + f := Float64(v) + s.FeatureBagFraction = &f + } + + case "gamma": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Gamma = &f + case float64: + f := Float64(v) + s.Gamma = &f + } + + case "lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lambda = &f + case float64: + f := Float64(v) + s.Lambda = &f + } + + case "max_attempts_to_add_tree": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAttemptsToAddTree = &value + case float64: + f := int(v) + s.MaxAttemptsToAddTree = &f + } + + case "max_optimization_rounds_per_hyperparameter": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxOptimizationRoundsPerHyperparameter = &value + case float64: + f := int(v) + s.MaxOptimizationRoundsPerHyperparameter = &f + } + + case "max_trees": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case 
string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTrees = &value + case float64: + f := int(v) + s.MaxTrees = &f + } + + case "num_folds": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumFolds = &value + case float64: + f := int(v) + s.NumFolds = &f + } + + case "num_splits_per_feature": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumSplitsPerFeature = &value + case float64: + f := int(v) + s.NumSplitsPerFeature = &f + } + + case "soft_tree_depth_limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SoftTreeDepthLimit = &value + case float64: + f := int(v) + s.SoftTreeDepthLimit = &f + } + + case "soft_tree_depth_tolerance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.SoftTreeDepthTolerance = &f + case float64: + f := Float64(v) + s.SoftTreeDepthTolerance = &f + } + + } + } + return nil +} + // NewHyperparameters returns a Hyperparameters. func NewHyperparameters() *Hyperparameters { r := &Hyperparameters{} diff --git a/typedapi/types/hyphenationdecompoundertokenfilter.go b/typedapi/types/hyphenationdecompoundertokenfilter.go old mode 100755 new mode 100644 index 6d5d081388..a25bb7af30 --- a/typedapi/types/hyphenationdecompoundertokenfilter.go +++ b/typedapi/types/hyphenationdecompoundertokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // HyphenationDecompounderTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L58-L60 type HyphenationDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` @@ -35,6 +45,119 @@ type HyphenationDecompounderTokenFilter struct { WordListPath *string `json:"word_list_path,omitempty"` } +func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hyphenation_patterns_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HyphenationPatternsPath = &o + + case "max_subword_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxSubwordSize = &value + case float64: + f := int(v) + s.MaxSubwordSize = &f + } + + case "min_subword_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinSubwordSize = &value + case float64: + f := int(v) + s.MinSubwordSize = &f + } + + case "min_word_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinWordSize = &value + case float64: + f := int(v) + s.MinWordSize = &f + } + + case "only_longest_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OnlyLongestMatch = &value + case bool: + s.OnlyLongestMatch = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "word_list": + if err := dec.Decode(&s.WordList); err != nil { + return err + } + + case "word_list_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WordListPath = &o + + } + } + return nil +} + // NewHyphenationDecompounderTokenFilter returns a HyphenationDecompounderTokenFilter. func NewHyphenationDecompounderTokenFilter() *HyphenationDecompounderTokenFilter { r := &HyphenationDecompounderTokenFilter{} diff --git a/typedapi/types/icuanalyzer.go b/typedapi/types/icuanalyzer.go old mode 100755 new mode 100644 index 82a4bde389..d529a11b7a --- a/typedapi/types/icuanalyzer.go +++ b/typedapi/types/icuanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,7 +27,7 @@ import ( // IcuAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L67-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L67-L71 type IcuAnalyzer struct { Method icunormalizationtype.IcuNormalizationType `json:"method"` Mode icunormalizationmode.IcuNormalizationMode `json:"mode"` diff --git a/typedapi/types/icucollationtokenfilter.go b/typedapi/types/icucollationtokenfilter.go old mode 100755 new mode 100644 index 058bcc5f06..ffefe022f5 --- a/typedapi/types/icucollationtokenfilter.go +++ b/typedapi/types/icucollationtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,11 +25,19 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationcasefirst" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationdecomposition" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationstrength" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IcuCollationTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L51-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L51-L65 type IcuCollationTokenFilter struct { Alternate *icucollationalternate.IcuCollationAlternate `json:"alternate,omitempty"` CaseFirst *icucollationcasefirst.IcuCollationCaseFirst `json:"caseFirst,omitempty"` @@ -47,6 +55,138 @@ type IcuCollationTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alternate": + if err := dec.Decode(&s.Alternate); err != nil { + return err + } + + case "caseFirst": + if err := dec.Decode(&s.CaseFirst); err != nil { + return err + } + + case "caseLevel": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CaseLevel = &value + case bool: + s.CaseLevel = &v + } + + case "country": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Country = &o + + case "decomposition": + if err := dec.Decode(&s.Decomposition); err != nil { + return err + } + + case "hiraganaQuaternaryMode": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HiraganaQuaternaryMode = &value + case bool: + s.HiraganaQuaternaryMode = &v + } + + case "language": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Language = &o + + case "numeric": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return 
err + } + s.Numeric = &value + case bool: + s.Numeric = &v + } + + case "rules": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Rules = &o + + case "strength": + if err := dec.Decode(&s.Strength); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "variableTop": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VariableTop = &o + + case "variant": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Variant = &o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIcuCollationTokenFilter returns a IcuCollationTokenFilter. func NewIcuCollationTokenFilter() *IcuCollationTokenFilter { r := &IcuCollationTokenFilter{} diff --git a/typedapi/types/icufoldingtokenfilter.go b/typedapi/types/icufoldingtokenfilter.go old mode 100755 new mode 100644 index ebdb20a99e..6888e7d1b8 --- a/typedapi/types/icufoldingtokenfilter.go +++ b/typedapi/types/icufoldingtokenfilter.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IcuFoldingTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L46-L49 type IcuFoldingTokenFilter struct { Type string `json:"type,omitempty"` UnicodeSetFilter string `json:"unicode_set_filter"` Version *string `json:"version,omitempty"` } +func (s *IcuFoldingTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "unicode_set_filter": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UnicodeSetFilter = o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIcuFoldingTokenFilter returns a IcuFoldingTokenFilter. func NewIcuFoldingTokenFilter() *IcuFoldingTokenFilter { r := &IcuFoldingTokenFilter{} diff --git a/typedapi/types/icunormalizationcharfilter.go b/typedapi/types/icunormalizationcharfilter.go old mode 100755 new mode 100644 index e01b71c09e..6ddcfaf89b --- a/typedapi/types/icunormalizationcharfilter.go +++ b/typedapi/types/icunormalizationcharfilter.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icunormalizationmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icunormalizationtype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // IcuNormalizationCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L40-L44 type IcuNormalizationCharFilter struct { Mode *icunormalizationmode.IcuNormalizationMode `json:"mode,omitempty"` Name *icunormalizationtype.IcuNormalizationType `json:"name,omitempty"` @@ -35,6 +41,46 @@ type IcuNormalizationCharFilter struct { Version *string `json:"version,omitempty"` } +func (s *IcuNormalizationCharFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIcuNormalizationCharFilter returns a IcuNormalizationCharFilter. func NewIcuNormalizationCharFilter() *IcuNormalizationCharFilter { r := &IcuNormalizationCharFilter{} diff --git a/typedapi/types/icunormalizationtokenfilter.go b/typedapi/types/icunormalizationtokenfilter.go old mode 100755 new mode 100644 index 19f6be0461..7872ae8489 --- a/typedapi/types/icunormalizationtokenfilter.go +++ b/typedapi/types/icunormalizationtokenfilter.go @@ -16,23 +16,64 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icunormalizationtype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // IcuNormalizationTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L35-L38 type IcuNormalizationTokenFilter struct { Name icunormalizationtype.IcuNormalizationType `json:"name"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *IcuNormalizationTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIcuNormalizationTokenFilter returns a IcuNormalizationTokenFilter. func NewIcuNormalizationTokenFilter() *IcuNormalizationTokenFilter { r := &IcuNormalizationTokenFilter{} diff --git a/typedapi/types/icutokenizer.go b/typedapi/types/icutokenizer.go old mode 100755 new mode 100644 index 7dde96da9a..7349949836 --- a/typedapi/types/icutokenizer.go +++ b/typedapi/types/icutokenizer.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IcuTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L30-L33 type IcuTokenizer struct { RuleFiles string `json:"rule_files"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *IcuTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "rule_files": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RuleFiles = o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIcuTokenizer returns a IcuTokenizer. func NewIcuTokenizer() *IcuTokenizer { r := &IcuTokenizer{} diff --git a/typedapi/types/icutransformtokenfilter.go b/typedapi/types/icutransformtokenfilter.go old mode 100755 new mode 100644 index 21a70ed1cd..f2a9664657 --- a/typedapi/types/icutransformtokenfilter.go +++ b/typedapi/types/icutransformtokenfilter.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icutransformdirection" + + "bytes" + "errors" + "io" + + "encoding/json" ) // IcuTransformTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/icu-plugin.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/icu-plugin.ts#L24-L28 type IcuTransformTokenFilter struct { Dir *icutransformdirection.IcuTransformDirection `json:"dir,omitempty"` Id string `json:"id"` @@ -34,6 +40,49 @@ type IcuTransformTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *IcuTransformTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "dir": + if err := dec.Decode(&s.Dir); err != nil { + return err + } + + case "id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Id = o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIcuTransformTokenFilter returns a IcuTransformTokenFilter. func NewIcuTransformTokenFilter() *IcuTransformTokenFilter { r := &IcuTransformTokenFilter{} diff --git a/typedapi/types/ids.go b/typedapi/types/ids.go old mode 100755 new mode 100644 index d2c5ead918..d00c21be68 --- a/typedapi/types/ids.go +++ b/typedapi/types/ids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Ids type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L56-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L56-L56 type Ids []string diff --git a/typedapi/types/idsquery.go b/typedapi/types/idsquery.go old mode 100755 new mode 100644 index dc7dc000c0..93fb674b3c --- a/typedapi/types/idsquery.go +++ b/typedapi/types/idsquery.go @@ -16,19 +16,89 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IdsQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L53-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L53-L55 type IdsQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Values []string `json:"values,omitempty"` } +func (s *IdsQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "values": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Values = append(s.Values, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Values); err != nil { + return err + } + } + + } + } + return nil +} + // NewIdsQuery returns a IdsQuery. func NewIdsQuery() *IdsQuery { r := &IdsQuery{} diff --git a/typedapi/types/ilm.go b/typedapi/types/ilm.go old mode 100755 new mode 100644 index 1116fa92c0..c45ae4d859 --- a/typedapi/types/ilm.go +++ b/typedapi/types/ilm.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Ilm type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L162-L165 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L162-L165 type Ilm struct { PolicyCount int `json:"policy_count"` PolicyStats []IlmPolicyStatistics `json:"policy_stats"` } +func (s *Ilm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "policy_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PolicyCount = value + case float64: + f := int(v) + s.PolicyCount = f + } + + case "policy_stats": + if err := dec.Decode(&s.PolicyStats); err != nil { + return err + } + + } + } + return nil +} + // NewIlm returns a Ilm. func NewIlm() *Ilm { r := &Ilm{} diff --git a/typedapi/types/ilmactions.go b/typedapi/types/ilmactions.go old mode 100755 new mode 100644 index 1b9539a140..116d9169c5 --- a/typedapi/types/ilmactions.go +++ b/typedapi/types/ilmactions.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,5 +24,5 @@ import "encoding/json" // IlmActions type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Phase.ts#L45-L45 -type IlmActions json.RawMessage +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Phase.ts#L45-L45 +type IlmActions = json.RawMessage diff --git a/typedapi/types/ilmindicator.go b/typedapi/types/ilmindicator.go new file mode 100644 index 0000000000..a1dff991aa --- /dev/null +++ b/typedapi/types/ilmindicator.go @@ -0,0 +1,43 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// IlmIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L144-L148 +type IlmIndicator struct { + Details *IlmIndicatorDetails `json:"details,omitempty"` + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewIlmIndicator returns a IlmIndicator. +func NewIlmIndicator() *IlmIndicator { + r := &IlmIndicator{} + + return r +} diff --git a/typedapi/types/ilmindicatordetails.go b/typedapi/types/ilmindicatordetails.go new file mode 100644 index 0000000000..0a38256468 --- /dev/null +++ b/typedapi/types/ilmindicatordetails.go @@ -0,0 +1,88 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/lifecycleoperationmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// IlmIndicatorDetails type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L149-L152 +type IlmIndicatorDetails struct { + IlmStatus lifecycleoperationmode.LifecycleOperationMode `json:"ilm_status"` + Policies int64 `json:"policies"` +} + +func (s *IlmIndicatorDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "ilm_status": + if err := dec.Decode(&s.IlmStatus); err != nil { + return err + } + + case "policies": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Policies = value + case float64: + f := int64(v) + s.Policies = f + } + + } + } + return nil +} + +// NewIlmIndicatorDetails returns a IlmIndicatorDetails. +func NewIlmIndicatorDetails() *IlmIndicatorDetails { + r := &IlmIndicatorDetails{} + + return r +} diff --git a/typedapi/types/ilmpolicy.go b/typedapi/types/ilmpolicy.go old mode 100755 new mode 100644 index 124fe847cb..db77afd34f --- a/typedapi/types/ilmpolicy.go +++ b/typedapi/types/ilmpolicy.go @@ -16,20 +16,54 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // IlmPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Policy.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Policy.ts#L23-L26 type IlmPolicy struct { - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` - Phases Phases `json:"phases"` + Meta_ Metadata `json:"_meta,omitempty"` + Phases Phases `json:"phases"` +} + +func (s *IlmPolicy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return err + } + + case "phases": + if err := dec.Decode(&s.Phases); err != nil { + return err + } + + } + } + return nil } // NewIlmPolicy returns a IlmPolicy. diff --git a/typedapi/types/ilmpolicystatistics.go b/typedapi/types/ilmpolicystatistics.go old mode 100755 new mode 100644 index c80c572ca8..83a89509f2 --- a/typedapi/types/ilmpolicystatistics.go +++ b/typedapi/types/ilmpolicystatistics.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
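The IlmIndicatorDetails decoder above coerces "policies" from either a JSON number or a numeric string into an int64. A brief sketch, assuming the same typedapi/types import path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "policies" may arrive as a number or as a numeric string; both decode to int64.
	for _, src := range []string{`{"policies": 17}`, `{"policies": "17"}`} {
		d := types.NewIlmIndicatorDetails()
		if err := json.Unmarshal([]byte(src), d); err != nil {
			panic(err)
		}
		fmt.Println(d.Policies)
	}
}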
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IlmPolicyStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L157-L160 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L157-L160 type IlmPolicyStatistics struct { IndicesManaged int `json:"indices_managed"` Phases Phases `json:"phases"` } +func (s *IlmPolicyStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indices_managed": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IndicesManaged = value + case float64: + f := int(v) + s.IndicesManaged = f + } + + case "phases": + if err := dec.Decode(&s.Phases); err != nil { + return err + } + + } + } + return nil +} + // NewIlmPolicyStatistics returns a IlmPolicyStatistics. func NewIlmPolicyStatistics() *IlmPolicyStatistics { r := &IlmPolicyStatistics{} diff --git a/typedapi/types/impact.go b/typedapi/types/impact.go new file mode 100644 index 0000000000..26e6b530a0 --- /dev/null +++ b/typedapi/types/impact.go @@ -0,0 +1,107 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/impactarea" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// Impact type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L64-L69 +type Impact struct { + Description string `json:"description"` + Id string `json:"id"` + ImpactAreas []impactarea.ImpactArea `json:"impact_areas"` + Severity int `json:"severity"` +} + +func (s *Impact) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Id = o + + case "impact_areas": + if err := dec.Decode(&s.ImpactAreas); err != nil { + return err + } + + case "severity": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Severity = value + case float64: + f := int(v) + s.Severity = f + } + + } + } + return nil +} + +// NewImpact returns a Impact. +func NewImpact() *Impact { + r := &Impact{} + + return r +} diff --git a/typedapi/types/indexaction.go b/typedapi/types/indexaction.go old mode 100755 new mode 100644 index fce4c748c5..9a3beb453a --- a/typedapi/types/indexaction.go +++ b/typedapi/types/indexaction.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/optype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/refresh" + + "bytes" + "errors" + "io" + + "encoding/json" ) // IndexAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L256-L265 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L256-L265 type IndexAction struct { DocId *string `json:"doc_id,omitempty"` ExecutionTimeField *string `json:"execution_time_field,omitempty"` @@ -37,6 +43,56 @@ type IndexAction struct { Timeout Duration `json:"timeout,omitempty"` } +func (s *IndexAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_id": + if err := dec.Decode(&s.DocId); err != nil { + return err + } + + case "execution_time_field": + if err := dec.Decode(&s.ExecutionTimeField); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "op_type": + if err := dec.Decode(&s.OpType); err != nil { + return err + } + + case "refresh": + if err := dec.Decode(&s.Refresh); err != nil { + return err + } + + case "timeout": + if err := dec.Decode(&s.Timeout); err != nil { + return err + } + + } + } + return nil +} + // NewIndexAction returns a IndexAction. 
func NewIndexAction() *IndexAction { r := &IndexAction{} diff --git a/typedapi/types/indexaliases.go b/typedapi/types/indexaliases.go old mode 100755 new mode 100644 index 08ab7f1567..91743c1c2b --- a/typedapi/types/indexaliases.go +++ b/typedapi/types/indexaliases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexAliases type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_alias/IndicesGetAliasResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_alias/IndicesGetAliasResponse.ts#L36-L38 type IndexAliases struct { Aliases map[string]AliasDefinition `json:"aliases"` } diff --git a/typedapi/types/indexanddatastreamaction.go b/typedapi/types/indexanddatastreamaction.go old mode 100755 new mode 100644 index 7829835e13..c76253199a --- a/typedapi/types/indexanddatastreamaction.go +++ b/typedapi/types/indexanddatastreamaction.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexAndDataStreamAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/modify_data_stream/types.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/modify_data_stream/types.ts#L28-L31 type IndexAndDataStreamAction struct { DataStream string `json:"data_stream"` Index string `json:"index"` } +func (s *IndexAndDataStreamAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "data_stream": + if err := dec.Decode(&s.DataStream); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + } + } + return nil +} + // NewIndexAndDataStreamAction returns a IndexAndDataStreamAction. func NewIndexAndDataStreamAction() *IndexAndDataStreamAction { r := &IndexAndDataStreamAction{} diff --git a/typedapi/types/indexcapabilities.go b/typedapi/types/indexcapabilities.go old mode 100755 new mode 100644 index b1874c1a1c..c72fe7edb0 --- a/typedapi/types/indexcapabilities.go +++ b/typedapi/types/indexcapabilities.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexCapabilities type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_index_caps/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_index_caps/types.ts#L24-L26 type IndexCapabilities struct { RollupJobs []RollupJobSummary `json:"rollup_jobs"` } diff --git a/typedapi/types/indexdetails.go b/typedapi/types/indexdetails.go old mode 100755 new mode 100644 index 6d38cc3167..97c1874896 --- a/typedapi/types/indexdetails.go +++ b/typedapi/types/indexdetails.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotIndexDetails.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotIndexDetails.ts#L23-L28 type IndexDetails struct { MaxSegmentsPerShard int64 `json:"max_segments_per_shard"` ShardCount int `json:"shard_count"` @@ -30,6 +40,77 @@ type IndexDetails struct { SizeInBytes int64 `json:"size_in_bytes"` } +func (s *IndexDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_segments_per_shard": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxSegmentsPerShard = value + case float64: + f := int64(v) + s.MaxSegmentsPerShard = f + } + + case "shard_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardCount = value + case float64: + f := int(v) + s.ShardCount = f + } + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + } + } + return nil +} + // NewIndexDetails returns a IndexDetails. func NewIndexDetails() *IndexDetails { r := &IndexDetails{} diff --git a/typedapi/types/indexfield.go b/typedapi/types/indexfield.go old mode 100755 new mode 100644 index f555776cf3..8ebf0fe0b9 --- a/typedapi/types/indexfield.go +++ b/typedapi/types/indexfield.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexField type. 
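The IndexDetails decoder above applies the same coercion to its size and segment counters, so responses that render long values as strings decode the same way as plain numbers. A short sketch, assuming the typedapi/types import path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	for _, src := range []string{
		`{"shard_count": 1, "size_in_bytes": 1048576}`,
		`{"shard_count": "1", "size_in_bytes": "1048576"}`, // stringified numbers
	} {
		d := types.NewIndexDetails()
		if err := json.Unmarshal([]byte(src), d); err != nil {
			panic(err)
		}
		// Both forms populate the int and int64 fields identically.
		fmt.Println(d.ShardCount, d.SizeInBytes)
	}
}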
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L46-L48 type IndexField struct { Enabled bool `json:"enabled"` } +func (s *IndexField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewIndexField returns a IndexField. func NewIndexField() *IndexField { r := &IndexField{} diff --git a/typedapi/types/indexhealthstats.go b/typedapi/types/indexhealthstats.go old mode 100755 new mode 100644 index dad0bec820..2bb50bd773 --- a/typedapi/types/indexhealthstats.go +++ b/typedapi/types/indexhealthstats.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IndexHealthStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/health/types.ts#L24-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/health/types.ts#L24-L34 type IndexHealthStats struct { ActivePrimaryShards int `json:"active_primary_shards"` ActiveShards int `json:"active_shards"` @@ -39,6 +47,151 @@ type IndexHealthStats struct { UnassignedShards int `json:"unassigned_shards"` } +func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active_primary_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ActivePrimaryShards = value + case float64: + f := int(v) + s.ActivePrimaryShards = f + } + + case "active_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ActiveShards = value + case float64: + f := int(v) + s.ActiveShards = f + } + + case "initializing_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InitializingShards = value + case float64: + f := int(v) + s.InitializingShards = f + } + + case "number_of_replicas": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfReplicas = value + case float64: + f := int(v) + s.NumberOfReplicas = f + } + + 
case "number_of_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfShards = value + case float64: + f := int(v) + s.NumberOfShards = f + } + + case "relocating_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelocatingShards = value + case float64: + f := int(v) + s.RelocatingShards = f + } + + case "shards": + if s.Shards == nil { + s.Shards = make(map[string]ShardHealthStats, 0) + } + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "unassigned_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.UnassignedShards = value + case float64: + f := int(v) + s.UnassignedShards = f + } + + } + } + return nil +} + // NewIndexHealthStats returns a IndexHealthStats. func NewIndexHealthStats() *IndexHealthStats { r := &IndexHealthStats{ diff --git a/typedapi/types/indexingpressurememorysummary.go b/typedapi/types/indexingpressurememorysummary.go old mode 100755 new mode 100644 index 12fdf04ac5..eef74290df --- a/typedapi/types/indexingpressurememorysummary.go +++ b/typedapi/types/indexingpressurememorysummary.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexingPressureMemorySummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L309-L318 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L309-L318 type IndexingPressureMemorySummary struct { AllInBytes int64 `json:"all_in_bytes"` CombinedCoordinatingAndPrimaryInBytes int64 `json:"combined_coordinating_and_primary_in_bytes"` @@ -34,6 +44,146 @@ type IndexingPressureMemorySummary struct { ReplicaRejections *int64 `json:"replica_rejections,omitempty"` } +func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "all_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AllInBytes = value + case float64: + f := int64(v) + s.AllInBytes = f + } + + case "combined_coordinating_and_primary_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CombinedCoordinatingAndPrimaryInBytes = value + case float64: + f := int64(v) + s.CombinedCoordinatingAndPrimaryInBytes = f + } + + case "coordinating_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CoordinatingInBytes = value + case float64: + f := int64(v) + s.CoordinatingInBytes = f + } + + case "coordinating_rejections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CoordinatingRejections = &value + case float64: + f := int64(v) + s.CoordinatingRejections = &f + } + + case "primary_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryInBytes = value + case float64: + f := int64(v) + s.PrimaryInBytes = f + } + + case "primary_rejections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryRejections = &value + case float64: + f := int64(v) + s.PrimaryRejections = &f + } + + case "replica_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReplicaInBytes = value + case float64: + f := int64(v) + s.ReplicaInBytes = f + } + + case "replica_rejections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReplicaRejections = &value + case float64: + f := int64(v) + s.ReplicaRejections = &f + } + + } + } + return nil +} + // NewIndexingPressureMemorySummary returns a IndexingPressureMemorySummary. 
func NewIndexingPressureMemorySummary() *IndexingPressureMemorySummary { r := &IndexingPressureMemorySummary{} diff --git a/typedapi/types/indexingstats.go b/typedapi/types/indexingstats.go old mode 100755 new mode 100644 index 51c21931f5..e0d253191e --- a/typedapi/types/indexingstats.go +++ b/typedapi/types/indexingstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L101-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L101-L117 type IndexingStats struct { DeleteCurrent int64 `json:"delete_current"` DeleteTime Duration `json:"delete_time,omitempty"` @@ -41,6 +51,184 @@ type IndexingStats struct { WriteLoad *Float64 `json:"write_load,omitempty"` } +func (s *IndexingStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "delete_current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DeleteCurrent = value + case float64: + f := int64(v) + s.DeleteCurrent = f + } + + case "delete_time": + if err := dec.Decode(&s.DeleteTime); err != nil { + return err + } + + case "delete_time_in_millis": + if err := dec.Decode(&s.DeleteTimeInMillis); err != nil { + return err + } + + case "delete_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DeleteTotal = value + case float64: + f := int64(v) + s.DeleteTotal = f + } + + case "index_current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexCurrent = value + case float64: + f := int64(v) + s.IndexCurrent = f + } + + case "index_failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexFailed = value + case float64: + f := int64(v) + s.IndexFailed = f + } + + case "index_time": + if err := dec.Decode(&s.IndexTime); err != nil { + return err + } + + case "index_time_in_millis": + if err := dec.Decode(&s.IndexTimeInMillis); err != nil { + return err + } + + case "index_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexTotal = value + case float64: + f := int64(v) + s.IndexTotal = f + } + + case "is_throttled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsThrottled = value + case bool: + s.IsThrottled = v + } + + case "noop_update_total": + var tmp interface{} + 
dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NoopUpdateTotal = value + case float64: + f := int64(v) + s.NoopUpdateTotal = f + } + + case "throttle_time": + if err := dec.Decode(&s.ThrottleTime); err != nil { + return err + } + + case "throttle_time_in_millis": + if err := dec.Decode(&s.ThrottleTimeInMillis); err != nil { + return err + } + + case "types": + if s.Types == nil { + s.Types = make(map[string]IndexingStats, 0) + } + if err := dec.Decode(&s.Types); err != nil { + return err + } + + case "write_load": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.WriteLoad = &f + case float64: + f := Float64(v) + s.WriteLoad = &f + } + + } + } + return nil +} + // NewIndexingStats returns a IndexingStats. func NewIndexingStats() *IndexingStats { r := &IndexingStats{ diff --git a/typedapi/types/indexmappingrecord.go b/typedapi/types/indexmappingrecord.go old mode 100755 new mode 100644 index 5580422a5e..2ea8bef10b --- a/typedapi/types/indexmappingrecord.go +++ b/typedapi/types/indexmappingrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexMappingRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L28-L31 type IndexMappingRecord struct { Item *TypeMapping `json:"item,omitempty"` Mappings TypeMapping `json:"mappings"` diff --git a/typedapi/types/indexpatterns.go b/typedapi/types/indexpatterns.go old mode 100755 new mode 100644 index e7db3a403a..76f6e5a1ae --- a/typedapi/types/indexpatterns.go +++ b/typedapi/types/indexpatterns.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexPatterns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L64-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L64-L64 type IndexPatterns []string diff --git a/typedapi/types/indexprivilegescheck.go b/typedapi/types/indexprivilegescheck.go old mode 100755 new mode 100644 index 7ac0daf9f2..e1028c53fe --- a/typedapi/types/indexprivilegescheck.go +++ b/typedapi/types/indexprivilegescheck.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexprivilege" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IndexPrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/types.ts#L33-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/types.ts#L33-L44 type IndexPrivilegesCheck struct { // AllowRestrictedIndices This needs to be set to true (default is false) if using wildcards or regexps // for patterns that cover restricted indices. @@ -43,6 +51,61 @@ type IndexPrivilegesCheck struct { Privileges []indexprivilege.IndexPrivilege `json:"privileges"` } +func (s *IndexPrivilegesCheck) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_restricted_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowRestrictedIndices = &value + case bool: + s.AllowRestrictedIndices = &v + } + + case "names": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Names = append(s.Names, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Names); err != nil { + return err + } + } + + case "privileges": + if err := dec.Decode(&s.Privileges); err != nil { + return err + } + + } + } + return nil +} + // NewIndexPrivilegesCheck returns a IndexPrivilegesCheck. func NewIndexPrivilegesCheck() *IndexPrivilegesCheck { r := &IndexPrivilegesCheck{} diff --git a/typedapi/types/indexresult.go b/typedapi/types/indexresult.go old mode 100755 new mode 100644 index 67032e8e26..96edc7914b --- a/typedapi/types/indexresult.go +++ b/typedapi/types/indexresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L267-L269 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L267-L269 type IndexResult struct { Response IndexResultSummary `json:"response"` } diff --git a/typedapi/types/indexresultsummary.go b/typedapi/types/indexresultsummary.go old mode 100755 new mode 100644 index cc82a86caf..4a5af8a758 --- a/typedapi/types/indexresultsummary.go +++ b/typedapi/types/indexresultsummary.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
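The IndexPrivilegesCheck decoder above lets "names" be either one index pattern or a list, and accepts "allow_restricted_indices" as a bool or as its string form. A sketch with the same typedapi/types import-path assumption; the pattern "logs-*" is only example data.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	src := `{"names": "logs-*", "allow_restricted_indices": "false"}`

	c := types.NewIndexPrivilegesCheck()
	if err := json.Unmarshal([]byte(src), c); err != nil {
		panic(err)
	}
	// The single pattern is normalised into the Names slice; the string flag
	// is parsed into the *bool field.
	fmt.Println(c.Names, c.AllowRestrictedIndices != nil && *c.AllowRestrictedIndices)
}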
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/result" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IndexResultSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L271-L277 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L271-L277 type IndexResultSummary struct { Created bool `json:"created"` Id string `json:"id"` @@ -35,6 +43,60 @@ type IndexResultSummary struct { Version int64 `json:"version"` } +func (s *IndexResultSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "created": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Created = value + case bool: + s.Created = v + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "result": + if err := dec.Decode(&s.Result); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIndexResultSummary returns a IndexResultSummary. func NewIndexResultSummary() *IndexResultSummary { r := &IndexResultSummary{} diff --git a/typedapi/types/indexrouting.go b/typedapi/types/indexrouting.go old mode 100755 new mode 100644 index 8691592445..6dfee02d2f --- a/typedapi/types/indexrouting.go +++ b/typedapi/types/indexrouting.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L22-L25 type IndexRouting struct { Allocation *IndexRoutingAllocation `json:"allocation,omitempty"` Rebalance *IndexRoutingRebalance `json:"rebalance,omitempty"` diff --git a/typedapi/types/indexroutingallocation.go b/typedapi/types/indexroutingallocation.go old mode 100755 new mode 100644 index 50c5205ae7..2cd0d1c016 --- a/typedapi/types/indexroutingallocation.go +++ b/typedapi/types/indexroutingallocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // IndexRoutingAllocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L27-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L27-L32 type IndexRoutingAllocation struct { Disk *IndexRoutingAllocationDisk `json:"disk,omitempty"` Enable *indexroutingallocationoptions.IndexRoutingAllocationOptions `json:"enable,omitempty"` diff --git a/typedapi/types/indexroutingallocationdisk.go b/typedapi/types/indexroutingallocationdisk.go old mode 100755 new mode 100644 index 7c570c56dc..60ba0a6920 --- a/typedapi/types/indexroutingallocationdisk.go +++ b/typedapi/types/indexroutingallocationdisk.go @@ -16,17 +16,52 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // IndexRoutingAllocationDisk type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L62-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L62-L64 type IndexRoutingAllocationDisk struct { ThresholdEnabled string `json:"threshold_enabled,omitempty"` } +func (s *IndexRoutingAllocationDisk) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "threshold_enabled": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ThresholdEnabled = o + + } + } + return nil +} + // NewIndexRoutingAllocationDisk returns a IndexRoutingAllocationDisk. func NewIndexRoutingAllocationDisk() *IndexRoutingAllocationDisk { r := &IndexRoutingAllocationDisk{} diff --git a/typedapi/types/indexroutingallocationinclude.go b/typedapi/types/indexroutingallocationinclude.go old mode 100755 new mode 100644 index de09b720da..42a2704299 --- a/typedapi/types/indexroutingallocationinclude.go +++ b/typedapi/types/indexroutingallocationinclude.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexRoutingAllocationInclude type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L52-L55 type IndexRoutingAllocationInclude struct { Id_ *string `json:"_id,omitempty"` TierPreference_ *string `json:"_tier_preference,omitempty"` } +func (s *IndexRoutingAllocationInclude) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_tier_preference": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TierPreference_ = &o + + } + } + return nil +} + // NewIndexRoutingAllocationInclude returns a IndexRoutingAllocationInclude. func NewIndexRoutingAllocationInclude() *IndexRoutingAllocationInclude { r := &IndexRoutingAllocationInclude{} diff --git a/typedapi/types/indexroutingallocationinitialrecovery.go b/typedapi/types/indexroutingallocationinitialrecovery.go old mode 100755 new mode 100644 index d7f2753d42..6f4d48f014 --- a/typedapi/types/indexroutingallocationinitialrecovery.go +++ b/typedapi/types/indexroutingallocationinitialrecovery.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexRoutingAllocationInitialRecovery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L57-L59 type IndexRoutingAllocationInitialRecovery struct { Id_ *string `json:"_id,omitempty"` } +func (s *IndexRoutingAllocationInitialRecovery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + } + } + return nil +} + // NewIndexRoutingAllocationInitialRecovery returns a IndexRoutingAllocationInitialRecovery. func NewIndexRoutingAllocationInitialRecovery() *IndexRoutingAllocationInitialRecovery { r := &IndexRoutingAllocationInitialRecovery{} diff --git a/typedapi/types/indexroutingrebalance.go b/typedapi/types/indexroutingrebalance.go old mode 100755 new mode 100644 index f83621337e..c3fe37e0cd --- a/typedapi/types/indexroutingrebalance.go +++ b/typedapi/types/indexroutingrebalance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // IndexRoutingRebalance type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexRouting.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexRouting.ts#L34-L36 type IndexRoutingRebalance struct { Enable indexroutingrebalanceoptions.IndexRoutingRebalanceOptions `json:"enable"` } diff --git a/typedapi/types/indexsegment.go b/typedapi/types/indexsegment.go old mode 100755 new mode 100644 index 2b1bf833c2..21877c4083 --- a/typedapi/types/indexsegment.go +++ b/typedapi/types/indexsegment.go @@ -16,17 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // IndexSegment type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/segments/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/segments/types.ts#L24-L26 type IndexSegment struct { Shards map[string][]ShardsSegment `json:"shards"` } +func (s *IndexSegment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "shards": + if s.Shards == nil { + s.Shards = make(map[string][]ShardsSegment, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := NewShardsSegment() + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Shards[key] = append(s.Shards[key], *o) + default: + o := []ShardsSegment{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Shards[key] = o + } + } + + } + } + return nil +} + // NewIndexSegment returns a IndexSegment. func NewIndexSegment() *IndexSegment { r := &IndexSegment{ diff --git a/typedapi/types/indexsegmentsort.go b/typedapi/types/indexsegmentsort.go old mode 100755 new mode 100644 index 047b22b796..18aa297147 --- a/typedapi/types/indexsegmentsort.go +++ b/typedapi/types/indexsegmentsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
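IndexSegment.Shards maps a shard number to a slice of ShardsSegment, but the decoder above also accepts a single object per key and normalises it into a one-element slice. A sketch under the same typedapi/types import-path assumption; the empty objects stand in for real segment payloads and are assumed to decode without error.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	for _, src := range []string{
		`{"shards": {"0": {}}}`,   // single segment object per shard key
		`{"shards": {"0": [{}]}}`, // already an array
	} {
		seg := types.NewIndexSegment()
		if err := json.Unmarshal([]byte(src), seg); err != nil {
			panic(err)
		}
		// Both shapes end up as a slice under the shard key.
		fmt.Println(len(seg.Shards["0"]))
	}
}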
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,11 +24,17 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/segmentsortmissing" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/segmentsortmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/segmentsortorder" + + "bytes" + "errors" + "io" + + "encoding/json" ) // IndexSegmentSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSegmentSort.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSegmentSort.ts#L22-L27 type IndexSegmentSort struct { Field []string `json:"field,omitempty"` Missing []segmentsortmissing.SegmentSortMissing `json:"missing,omitempty"` @@ -36,6 +42,90 @@ type IndexSegmentSort struct { Order []segmentsortorder.SegmentSortOrder `json:"order,omitempty"` } +func (s *IndexSegmentSort) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Field = append(s.Field, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Field); err != nil { + return err + } + } + + case "missing": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := &segmentsortmissing.SegmentSortMissing{} + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Missing = append(s.Missing, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Missing); err != nil { + return err + } + } + + case "mode": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := &segmentsortmode.SegmentSortMode{} + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Mode = append(s.Mode, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Mode); err != nil { + return err + } + } + + case "order": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := &segmentsortorder.SegmentSortOrder{} + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Order = append(s.Order, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Order); err != nil { + return err + } + } + + } + } + return nil +} + // NewIndexSegmentSort returns a IndexSegmentSort. func NewIndexSegmentSort() *IndexSegmentSort { r := &IndexSegmentSort{} diff --git a/typedapi/types/indexsettingblocks.go b/typedapi/types/indexsettingblocks.go old mode 100755 new mode 100644 index e53ad64ecb..9f2f788d63 --- a/typedapi/types/indexsettingblocks.go +++ b/typedapi/types/indexsettingblocks.go @@ -16,19 +16,104 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexSettingBlocks type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L245-L251 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L245-L251 type IndexSettingBlocks struct { - Metadata *bool `json:"metadata,omitempty"` - Read *bool `json:"read,omitempty"` - ReadOnly *bool `json:"read_only,omitempty"` - ReadOnlyAllowDelete *bool `json:"read_only_allow_delete,omitempty"` - Write string `json:"write,omitempty"` + Metadata Stringifiedboolean `json:"metadata,omitempty"` + Read *bool `json:"read,omitempty"` + ReadOnly *bool `json:"read_only,omitempty"` + ReadOnlyAllowDelete *bool `json:"read_only_allow_delete,omitempty"` + Write string `json:"write,omitempty"` +} + +func (s *IndexSettingBlocks) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "read": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Read = &value + case bool: + s.Read = &v + } + + case "read_only": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ReadOnly = &value + case bool: + s.ReadOnly = &v + } + + case "read_only_allow_delete": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ReadOnlyAllowDelete = &value + case bool: + s.ReadOnlyAllowDelete = &v + } + + case "write": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Write = o + + } + } + return nil } // NewIndexSettingBlocks returns a IndexSettingBlocks. diff --git a/typedapi/types/indexsettings.go b/typedapi/types/indexsettings.go old mode 100755 new mode 100644 index 7e90b8916f..9562f4b18e --- a/typedapi/types/indexsettings.go +++ b/typedapi/types/indexsettings.go @@ -16,20 +16,27 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexcheckonstartup" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IndexSettings type. 
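The IndexSettingBlocks decoder above accepts its boolean flags either as JSON booleans or as the string forms that index settings are often serialised with. A short sketch, same typedapi/types import-path assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Settings payloads frequently render flags as strings; both spellings decode.
	src := `{"read": "true", "read_only": false}`

	b := types.NewIndexSettingBlocks()
	if err := json.Unmarshal([]byte(src), b); err != nil {
		panic(err)
	}
	fmt.Println(*b.Read, *b.ReadOnly)
}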
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L69-L168 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L69-L168 type IndexSettings struct { Analysis *IndexSettingsAnalysis `json:"analysis,omitempty"` // Analyze Settings to define analyzers, tokenizers, token filters and character @@ -78,7 +85,7 @@ type IndexSettings struct { QueryString *SettingsQueryString `json:"query_string,omitempty"` RefreshInterval Duration `json:"refresh_interval,omitempty"` Routing *IndexRouting `json:"routing,omitempty"` - RoutingPartitionSize *int `json:"routing_partition_size,omitempty"` + RoutingPartitionSize Stringifiedinteger `json:"routing_partition_size,omitempty"` RoutingPath []string `json:"routing_path,omitempty"` Search *SettingsSearch `json:"search,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` @@ -99,6 +106,525 @@ type IndexSettings struct { Version *IndexVersioning `json:"version,omitempty"` } +func (s *IndexSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analysis": + if err := dec.Decode(&s.Analysis); err != nil { + return err + } + + case "analyze": + if err := dec.Decode(&s.Analyze); err != nil { + return err + } + + case "auto_expand_replicas": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AutoExpandReplicas = &o + + case "blocks": + if err := dec.Decode(&s.Blocks); err != nil { + return err + } + + case "check_on_startup": + if err := dec.Decode(&s.CheckOnStartup); err != nil { + return err + } + + case "codec": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Codec = &o + + case "creation_date": + if err := dec.Decode(&s.CreationDate); err != nil { + return err + } + + case "creation_date_string": + if err := dec.Decode(&s.CreationDateString); err != nil { + return err + } + + case "default_pipeline": + if err := dec.Decode(&s.DefaultPipeline); err != nil { + return err + } + + case "final_pipeline": + if err := dec.Decode(&s.FinalPipeline); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = o + + case "gc_deletes": + if err := dec.Decode(&s.GcDeletes); err != nil { + return err + } + + case "hidden": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Hidden = o + + case "highlight": + if err := dec.Decode(&s.Highlight); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "indexing_pressure": + if err := dec.Decode(&s.IndexingPressure); err != nil { + return err + } + + case "indexing.slowlog": + if err := dec.Decode(&s.IndexingSlowlog); err != nil { + return err + } + + case "lifecycle": + if err := dec.Decode(&s.Lifecycle); err != nil { + return err + } + + case "load_fixed_bitset_filters_eagerly": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.LoadFixedBitsetFiltersEagerly = &value + case bool: + 
s.LoadFixedBitsetFiltersEagerly = &v + } + + case "mapping": + if err := dec.Decode(&s.Mapping); err != nil { + return err + } + + case "max_docvalue_fields_search": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDocvalueFieldsSearch = &value + case float64: + f := int(v) + s.MaxDocvalueFieldsSearch = &f + } + + case "max_inner_result_window": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxInnerResultWindow = &value + case float64: + f := int(v) + s.MaxInnerResultWindow = &f + } + + case "max_ngram_diff": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxNgramDiff = &value + case float64: + f := int(v) + s.MaxNgramDiff = &f + } + + case "max_refresh_listeners": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxRefreshListeners = &value + case float64: + f := int(v) + s.MaxRefreshListeners = &f + } + + case "max_regex_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxRegexLength = &value + case float64: + f := int(v) + s.MaxRegexLength = &f + } + + case "max_rescore_window": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxRescoreWindow = &value + case float64: + f := int(v) + s.MaxRescoreWindow = &f + } + + case "max_result_window": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxResultWindow = &value + case float64: + f := int(v) + s.MaxResultWindow = &f + } + + case "max_script_fields": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxScriptFields = &value + case float64: + f := int(v) + s.MaxScriptFields = &f + } + + case "max_shingle_diff": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxShingleDiff = &value + case float64: + f := int(v) + s.MaxShingleDiff = &f + } + + case "max_slices_per_scroll": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxSlicesPerScroll = &value + case float64: + f := int(v) + s.MaxSlicesPerScroll = &f + } + + case "max_terms_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTermsCount = &value + case float64: + f := int(v) + s.MaxTermsCount = &f + } + + case "merge": + if err := dec.Decode(&s.Merge); err != nil { + return err + } + + case "mode": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Mode = &o + + case "number_of_replicas": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NumberOfReplicas = o + + case 
"number_of_routing_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfRoutingShards = &value + case float64: + f := int(v) + s.NumberOfRoutingShards = &f + } + + case "number_of_shards": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NumberOfShards = o + + case "priority": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Priority = o + + case "provided_name": + if err := dec.Decode(&s.ProvidedName); err != nil { + return err + } + + case "queries": + if err := dec.Decode(&s.Queries); err != nil { + return err + } + + case "query_string": + if err := dec.Decode(&s.QueryString); err != nil { + return err + } + + case "refresh_interval": + if err := dec.Decode(&s.RefreshInterval); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "routing_partition_size": + if err := dec.Decode(&s.RoutingPartitionSize); err != nil { + return err + } + + case "routing_path": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.RoutingPath = append(s.RoutingPath, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.RoutingPath); err != nil { + return err + } + } + + case "search": + if err := dec.Decode(&s.Search); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + case "shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shards = &value + case float64: + f := int(v) + s.Shards = &f + } + + case "similarity": + if err := dec.Decode(&s.Similarity); err != nil { + return err + } + + case "soft_deletes": + if err := dec.Decode(&s.SoftDeletes); err != nil { + return err + } + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + case "store": + if err := dec.Decode(&s.Store); err != nil { + return err + } + + case "time_series": + if err := dec.Decode(&s.TimeSeries); err != nil { + return err + } + + case "top_metrics_max_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TopMetricsMaxSize = &value + case float64: + f := int(v) + s.TopMetricsMaxSize = &f + } + + case "translog": + if err := dec.Decode(&s.Translog); err != nil { + return err + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return err + } + + case "verified_before_close": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VerifiedBeforeClose = o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + default: + + if key, ok := t.(string); ok { + if s.IndexSettings == nil { + s.IndexSettings = make(map[string]json.RawMessage, 0) + } + raw := new(json.RawMessage) + if err := dec.Decode(&raw); err != nil { + return err + } + s.IndexSettings[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s IndexSettings) MarshalJSON() ([]byte, error) { type opt 
IndexSettings @@ -118,6 +644,7 @@ func (s IndexSettings) MarshalJSON() ([]byte, error) { for key, value := range s.IndexSettings { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "IndexSettings") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/indexsettingsanalysis.go b/typedapi/types/indexsettingsanalysis.go old mode 100755 new mode 100644 index ed9d47e751..924509f4a0 --- a/typedapi/types/indexsettingsanalysis.go +++ b/typedapi/types/indexsettingsanalysis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,7 +30,7 @@ import ( // IndexSettingsAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L310-L316 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L310-L316 type IndexSettingsAnalysis struct { Analyzer map[string]Analyzer `json:"analyzer,omitempty"` CharFilter map[string]CharFilter `json:"char_filter,omitempty"` @@ -40,6 +40,7 @@ type IndexSettingsAnalysis struct { } func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -54,6 +55,9 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { switch t { case "analyzer": + if s.Analyzer == nil { + s.Analyzer = make(map[string]Analyzer, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -149,13 +153,16 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { } s.Analyzer[key] = oo default: - if err := dec.Decode(&s.Analyzer); err != nil { + if err := localDec.Decode(&s.Analyzer); err != nil { return err } } } case "char_filter": + if s.CharFilter == nil { + s.CharFilter = make(map[string]CharFilter, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -197,13 +204,16 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { } s.CharFilter[key] = oo default: - if err := dec.Decode(&s.CharFilter); err != nil { + if err := localDec.Decode(&s.CharFilter); err != nil { return err } } } case "filter": + if s.Filter == nil { + s.Filter = make(map[string]TokenFilter, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -503,13 +513,16 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { } s.Filter[key] = oo default: - if err := dec.Decode(&s.Filter); err != nil { + if err := localDec.Decode(&s.Filter); err != nil { return err } } } case "normalizer": + if s.Normalizer == nil { + s.Normalizer = make(map[string]Normalizer, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -533,13 +546,16 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { } s.Normalizer[key] = oo default: - if err := dec.Decode(&s.Normalizer); err != nil { + if err := localDec.Decode(&s.Normalizer); err != nil { return err } } } case "tokenizer": + if s.Tokenizer == nil { + s.Tokenizer = make(map[string]Tokenizer, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range 
refs { @@ -635,7 +651,7 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { } s.Tokenizer[key] = oo default: - if err := dec.Decode(&s.Tokenizer); err != nil { + if err := localDec.Decode(&s.Tokenizer); err != nil { return err } } diff --git a/typedapi/types/indexsettingslifecycle.go b/typedapi/types/indexsettingslifecycle.go old mode 100755 new mode 100644 index 9ffd338406..940bfc1994 --- a/typedapi/types/indexsettingslifecycle.go +++ b/typedapi/types/indexsettingslifecycle.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexSettingsLifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L267-L300 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L267-L300 type IndexSettingsLifecycle struct { // IndexingComplete Indicates whether or not the index has been rolled over. Automatically set to // true when ILM completes the rollover action. @@ -55,6 +65,87 @@ type IndexSettingsLifecycle struct { Step *IndexSettingsLifecycleStep `json:"step,omitempty"` } +func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indexing_complete": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IndexingComplete = &value + case bool: + s.IndexingComplete = &v + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "origination_date": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OriginationDate = &value + case float64: + f := int64(v) + s.OriginationDate = &f + } + + case "parse_origination_date": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ParseOriginationDate = &value + case bool: + s.ParseOriginationDate = &v + } + + case "rollover_alias": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RolloverAlias = &o + + case "step": + if err := dec.Decode(&s.Step); err != nil { + return err + } + + } + } + return nil +} + // NewIndexSettingsLifecycle returns a IndexSettingsLifecycle. func NewIndexSettingsLifecycle() *IndexSettingsLifecycle { r := &IndexSettingsLifecycle{} diff --git a/typedapi/types/indexsettingslifecyclestep.go b/typedapi/types/indexsettingslifecyclestep.go old mode 100755 new mode 100644 index f8e366b37b..ac2670a3f6 --- a/typedapi/types/indexsettingslifecyclestep.go +++ b/typedapi/types/indexsettingslifecyclestep.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
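Stepping back to the IndexSettings hunk above: its default branch funnels any key the switch does not recognize, typically dotted setting names, into the IndexSettings catch-all map, and the one-line change to MarshalJSON drops the container key so only the flattened settings are emitted. A rough round-trip sketch of that idea, with a hypothetical settings type and an Extra field standing in for the generated catch-all:

package main

import (
	"encoding/json"
	"fmt"
)

// settings is hypothetical; Extra plays the role of the generated
// IndexSettings catch-all map.
type settings struct {
	Codec string                     `json:"codec,omitempty"`
	Extra map[string]json.RawMessage `json:"-"`
}

func (s *settings) UnmarshalJSON(data []byte) error {
	type plain settings // avoids recursing into this method
	var p plain
	if err := json.Unmarshal(data, &p); err != nil {
		return err
	}
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	delete(raw, "codec") // known keys stay on their typed fields
	p.Extra = raw
	*s = settings(p)
	return nil
}

func (s settings) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	for k, v := range s.Extra { // flatten the unknown keys back out
		out[k] = v
	}
	if s.Codec != "" {
		out["codec"] = s.Codec
	}
	return json.Marshal(out) // no container key in the output
}

func main() {
	in := []byte(`{"codec":"best_compression","index.mapping.total_fields.limit":"2000"}`)
	var s settings
	if err := json.Unmarshal(in, &s); err != nil {
		panic(err)
	}
	out, err := json.Marshal(s)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // both keys appear at the top level again
}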
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexSettingsLifecycleStep type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L302-L308 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L302-L308 type IndexSettingsLifecycleStep struct { // WaitTimeThreshold Time to wait for the cluster to resolve allocation issues during an ILM // shrink action. Must be greater than 1h (1 hour). @@ -30,6 +38,31 @@ type IndexSettingsLifecycleStep struct { WaitTimeThreshold Duration `json:"wait_time_threshold,omitempty"` } +func (s *IndexSettingsLifecycleStep) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "wait_time_threshold": + if err := dec.Decode(&s.WaitTimeThreshold); err != nil { + return err + } + + } + } + return nil +} + // NewIndexSettingsLifecycleStep returns a IndexSettingsLifecycleStep. func NewIndexSettingsLifecycleStep() *IndexSettingsLifecycleStep { r := &IndexSettingsLifecycleStep{} diff --git a/typedapi/types/indexsettingstimeseries.go b/typedapi/types/indexsettingstimeseries.go old mode 100755 new mode 100644 index fa46999b6c..5d306168c5 --- a/typedapi/types/indexsettingstimeseries.go +++ b/typedapi/types/indexsettingstimeseries.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexSettingsTimeSeries type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L318-L321 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L318-L321 type IndexSettingsTimeSeries struct { EndTime DateTime `json:"end_time,omitempty"` StartTime DateTime `json:"start_time,omitempty"` } +func (s *IndexSettingsTimeSeries) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "end_time": + if err := dec.Decode(&s.EndTime); err != nil { + return err + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + } + } + return nil +} + // NewIndexSettingsTimeSeries returns a IndexSettingsTimeSeries. func NewIndexSettingsTimeSeries() *IndexSettingsTimeSeries { r := &IndexSettingsTimeSeries{} diff --git a/typedapi/types/indexstate.go b/typedapi/types/indexstate.go old mode 100755 new mode 100644 index c62bcd5f94..0f84740eae --- a/typedapi/types/indexstate.go +++ b/typedapi/types/indexstate.go @@ -16,13 +16,21 @@ // under the License. 
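The guards added around the analysis maps above (if s.Analyzer == nil { s.Analyzer = make(...) } and friends) and the dec-to-localDec change fix two easy-to-miss issues: assigning into a map that was never allocated panics at runtime, and a nested json.RawMessage has to be decoded with a decoder positioned on that message rather than on the already-consumed outer stream. The map half in isolation:

package main

import "fmt"

func main() {
	var analyzers map[string]string

	// analyzers["std"] = "standard" // would panic: assignment to entry in nil map

	if analyzers == nil {
		analyzers = make(map[string]string) // the shape of the added guard
	}
	analyzers["std"] = "standard" // safe once allocated
	fmt.Println(analyzers)
}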
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexState type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexState.ts#L26-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexState.ts#L26-L33 type IndexState struct { Aliases map[string]Alias `json:"aliases,omitempty"` DataStream *string `json:"data_stream,omitempty"` @@ -32,6 +40,54 @@ type IndexState struct { Settings *IndexSettings `json:"settings,omitempty"` } +func (s *IndexState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if s.Aliases == nil { + s.Aliases = make(map[string]Alias, 0) + } + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "data_stream": + if err := dec.Decode(&s.DataStream); err != nil { + return err + } + + case "defaults": + if err := dec.Decode(&s.Defaults); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + } + } + return nil +} + // NewIndexState returns a IndexState. func NewIndexState() *IndexState { r := &IndexState{ diff --git a/typedapi/types/indexstats.go b/typedapi/types/indexstats.go old mode 100755 new mode 100644 index b695882175..78461b1c61 --- a/typedapi/types/indexstats.go +++ b/typedapi/types/indexstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndexStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L52-L90 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L52-L90 type IndexStats struct { Bulk *BulkStats `json:"bulk,omitempty"` // Completion Contains statistics about completions across all shards assigned to the node. diff --git a/typedapi/types/indextemplate.go b/typedapi/types/indextemplate.go old mode 100755 new mode 100644 index d182f3bd71..8b07bb2536 --- a/typedapi/types/indextemplate.go +++ b/typedapi/types/indextemplate.go @@ -16,28 +16,124 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // IndexTemplate type. 
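Several list-valued fields in these hunks, routing_path above and index_patterns just below, are decoded by peeking at the raw message: a leading '[' means an actual array, anything else is treated as a single string and promoted to a one-element slice. A self-contained sketch of that shape, with an illustrative template type:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// template is illustrative; only the string-or-array handling matters.
type template struct {
	IndexPatterns []string
}

func (t *template) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	msg, ok := raw["index_patterns"]
	if !ok {
		return nil
	}
	if bytes.HasPrefix(msg, []byte("[")) {
		return json.Unmarshal(msg, &t.IndexPatterns)
	}
	var single string // a bare string is promoted to a one-element slice
	if err := json.Unmarshal(msg, &single); err != nil {
		return err
	}
	t.IndexPatterns = []string{single}
	return nil
}

func main() {
	for _, doc := range []string{`{"index_patterns":"logs-*"}`, `{"index_patterns":["logs-*","metrics-*"]}`} {
		var t template
		if err := json.Unmarshal([]byte(doc), &t); err != nil {
			panic(err)
		}
		fmt.Println(t.IndexPatterns)
	}
}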
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexTemplate.ts#L27-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexTemplate.ts#L27-L37 type IndexTemplate struct { AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` ComposedOf []string `json:"composed_of"` DataStream *IndexTemplateDataStreamConfiguration `json:"data_stream,omitempty"` IndexPatterns []string `json:"index_patterns"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ Metadata `json:"_meta,omitempty"` Priority *int64 `json:"priority,omitempty"` Template *IndexTemplateSummary `json:"template,omitempty"` Version *int64 `json:"version,omitempty"` } +func (s *IndexTemplate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_auto_create": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowAutoCreate = &value + case bool: + s.AllowAutoCreate = &v + } + + case "composed_of": + if err := dec.Decode(&s.ComposedOf); err != nil { + return err + } + + case "data_stream": + if err := dec.Decode(&s.DataStream); err != nil { + return err + } + + case "index_patterns": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.IndexPatterns = append(s.IndexPatterns, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { + return err + } + } + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return err + } + + case "priority": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Priority = &value + case float64: + f := int64(v) + s.Priority = &f + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIndexTemplate returns a IndexTemplate. func NewIndexTemplate() *IndexTemplate { r := &IndexTemplate{} diff --git a/typedapi/types/indextemplatedatastreamconfiguration.go b/typedapi/types/indextemplatedatastreamconfiguration.go old mode 100755 new mode 100644 index efae062191..6b20b28dd4 --- a/typedapi/types/indextemplatedatastreamconfiguration.go +++ b/typedapi/types/indextemplatedatastreamconfiguration.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndexTemplateDataStreamConfiguration type. 
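The numeric branches in the settings and template hunks all follow the same pattern: the value is decoded into interface{}, which yields float64 for JSON numbers and string for quoted numbers, and both are normalized onto the Go integer field. A stand-alone version of that branch (priorityOf is a made-up helper, not part of the generated API):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// priorityOf mirrors the numeric branch: interface{} gives float64 for JSON
// numbers and string for quoted numbers; both end up as int64.
func priorityOf(raw json.RawMessage) (int64, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64:
		return int64(v), nil
	default:
		return 0, fmt.Errorf("unexpected priority type %T", tmp)
	}
}

func main() {
	for _, raw := range []string{`200`, `"200"`} {
		p, err := priorityOf(json.RawMessage(raw))
		if err != nil {
			panic(err)
		}
		fmt.Println(p) // 200 both times
	}
}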
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexTemplate.ts#L39-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexTemplate.ts#L39-L50 type IndexTemplateDataStreamConfiguration struct { // AllowCustomRouting If true, the data stream supports custom routing. AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` @@ -30,6 +40,54 @@ type IndexTemplateDataStreamConfiguration struct { Hidden *bool `json:"hidden,omitempty"` } +func (s *IndexTemplateDataStreamConfiguration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_custom_routing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowCustomRouting = &value + case bool: + s.AllowCustomRouting = &v + } + + case "hidden": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Hidden = &value + case bool: + s.Hidden = &v + } + + } + } + return nil +} + // NewIndexTemplateDataStreamConfiguration returns a IndexTemplateDataStreamConfiguration. func NewIndexTemplateDataStreamConfiguration() *IndexTemplateDataStreamConfiguration { r := &IndexTemplateDataStreamConfiguration{} diff --git a/typedapi/types/indextemplateitem.go b/typedapi/types/indextemplateitem.go old mode 100755 new mode 100644 index f18744ef3b..530aabeb28 --- a/typedapi/types/indextemplateitem.go +++ b/typedapi/types/indextemplateitem.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexTemplateItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L29-L32 type IndexTemplateItem struct { IndexTemplate IndexTemplate `json:"index_template"` Name string `json:"name"` } +func (s *IndexTemplateItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index_template": + if err := dec.Decode(&s.IndexTemplate); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewIndexTemplateItem returns a IndexTemplateItem. 
func NewIndexTemplateItem() *IndexTemplateItem { r := &IndexTemplateItem{} diff --git a/typedapi/types/indextemplatemapping.go b/typedapi/types/indextemplatemapping.go old mode 100755 new mode 100644 index e79bfa5517..a307759a74 --- a/typedapi/types/indextemplatemapping.go +++ b/typedapi/types/indextemplatemapping.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexTemplateMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L60-L64 type IndexTemplateMapping struct { Aliases map[string]Alias `json:"aliases,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` } +func (s *IndexTemplateMapping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if s.Aliases == nil { + s.Aliases = make(map[string]Alias, 0) + } + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + } + } + return nil +} + // NewIndexTemplateMapping returns a IndexTemplateMapping. func NewIndexTemplateMapping() *IndexTemplateMapping { r := &IndexTemplateMapping{ diff --git a/typedapi/types/indextemplatesummary.go b/typedapi/types/indextemplatesummary.go old mode 100755 new mode 100644 index a9afaa2df0..4202b21b7e --- a/typedapi/types/indextemplatesummary.go +++ b/typedapi/types/indextemplatesummary.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexTemplateSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexTemplate.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexTemplate.ts#L52-L56 type IndexTemplateSummary struct { Aliases map[string]Alias `json:"aliases,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` } +func (s *IndexTemplateSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if s.Aliases == nil { + s.Aliases = make(map[string]Alias, 0) + } + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + } + } + return nil +} + // NewIndexTemplateSummary returns a IndexTemplateSummary. func NewIndexTemplateSummary() *IndexTemplateSummary { r := &IndexTemplateSummary{ diff --git a/typedapi/types/indexversioning.go b/typedapi/types/indexversioning.go old mode 100755 new mode 100644 index e1cbbaffd2..764da3d208 --- a/typedapi/types/indexversioning.go +++ b/typedapi/types/indexversioning.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndexVersioning type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L262-L265 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L262-L265 type IndexVersioning struct { Created *string `json:"created,omitempty"` CreatedString *string `json:"created_string,omitempty"` } +func (s *IndexVersioning) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "created": + if err := dec.Decode(&s.Created); err != nil { + return err + } + + case "created_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreatedString = &o + + } + } + return nil +} + // NewIndexVersioning returns a IndexVersioning. func NewIndexVersioning() *IndexVersioning { r := &IndexVersioning{} diff --git a/typedapi/types/indicatornode.go b/typedapi/types/indicatornode.go new file mode 100644 index 0000000000..c7fc72bc5d --- /dev/null +++ b/typedapi/types/indicatornode.go @@ -0,0 +1,79 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + +// IndicatorNode type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L89-L92 +type IndicatorNode struct { + Name string `json:"name,omitempty"` + NodeId string `json:"node_id,omitempty"` +} + +func (s *IndicatorNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = o + + case "node_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeId = o + + } + } + return nil +} + +// NewIndicatorNode returns a IndicatorNode. +func NewIndicatorNode() *IndicatorNode { + r := &IndicatorNode{} + + return r +} diff --git a/typedapi/types/indicators.go b/typedapi/types/indicators.go new file mode 100644 index 0000000000..9157c40949 --- /dev/null +++ b/typedapi/types/indicators.go @@ -0,0 +1,40 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +// Indicators type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L32-L39 +type Indicators struct { + Disk *DiskIndicator `json:"disk,omitempty"` + Ilm *IlmIndicator `json:"ilm,omitempty"` + MasterIsStable *MasterIsStableIndicator `json:"master_is_stable,omitempty"` + RepositoryIntegrity *RepositoryIntegrityIndicator `json:"repository_integrity,omitempty"` + ShardsAvailability *ShardsAvailabilityIndicator `json:"shards_availability,omitempty"` + Slm *SlmIndicator `json:"slm,omitempty"` +} + +// NewIndicators returns a Indicators. 
+func NewIndicators() *Indicators { + r := &Indicators{} + + return r +} diff --git a/typedapi/types/indices.go b/typedapi/types/indices.go old mode 100755 new mode 100644 index 1d108aa035..0a3888ab8f --- a/typedapi/types/indices.go +++ b/typedapi/types/indices.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Indices type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L61-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L61-L61 type Indices []string diff --git a/typedapi/types/indicesaction.go b/typedapi/types/indicesaction.go old mode 100755 new mode 100644 index 268917a8af..b41b0906fe --- a/typedapi/types/indicesaction.go +++ b/typedapi/types/indicesaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndicesAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/update_aliases/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/update_aliases/types.ts#L23-L28 type IndicesAction struct { Add *AddAction `json:"add,omitempty"` Remove *RemoveAction `json:"remove,omitempty"` diff --git a/typedapi/types/indicesblockstatus.go b/typedapi/types/indicesblockstatus.go old mode 100755 new mode 100644 index 1962804559..3a89f5751f --- a/typedapi/types/indicesblockstatus.go +++ b/typedapi/types/indicesblockstatus.go @@ -16,18 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndicesBlockStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/add_block/IndicesAddBlockResponse.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/add_block/IndicesAddBlockResponse.ts#L30-L33 type IndicesBlockStatus struct { Blocked bool `json:"blocked"` Name string `json:"name"` } +func (s *IndicesBlockStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "blocked": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Blocked = value + case bool: + s.Blocked = v + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewIndicesBlockStatus returns a IndicesBlockStatus. func NewIndicesBlockStatus() *IndicesBlockStatus { r := &IndicesBlockStatus{} diff --git a/typedapi/types/indicesindexingpressure.go b/typedapi/types/indicesindexingpressure.go old mode 100755 new mode 100644 index 161fa51f4b..d487e186ae --- a/typedapi/types/indicesindexingpressure.go +++ b/typedapi/types/indicesindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndicesIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L540-L542 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L540-L542 type IndicesIndexingPressure struct { Memory IndicesIndexingPressureMemory `json:"memory"` } diff --git a/typedapi/types/indicesindexingpressurememory.go b/typedapi/types/indicesindexingpressurememory.go old mode 100755 new mode 100644 index c56b5b020d..82ec02afda --- a/typedapi/types/indicesindexingpressurememory.go +++ b/typedapi/types/indicesindexingpressurememory.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndicesIndexingPressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L544-L551 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L544-L551 type IndicesIndexingPressureMemory struct { // Limit Number of outstanding bytes that may be consumed by indexing requests. 
When // this limit is reached or exceeded, @@ -32,6 +42,42 @@ type IndicesIndexingPressureMemory struct { Limit *int `json:"limit,omitempty"` } +func (s *IndicesIndexingPressureMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int(v) + s.Limit = &f + } + + } + } + return nil +} + // NewIndicesIndexingPressureMemory returns a IndicesIndexingPressureMemory. func NewIndicesIndexingPressureMemory() *IndicesIndexingPressureMemory { r := &IndicesIndexingPressureMemory{} diff --git a/typedapi/types/indicesmodifyaction.go b/typedapi/types/indicesmodifyaction.go old mode 100755 new mode 100644 index 947e44470b..0eadca79b8 --- a/typedapi/types/indicesmodifyaction.go +++ b/typedapi/types/indicesmodifyaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndicesModifyAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/modify_data_stream/types.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/modify_data_stream/types.ts#L22-L26 type IndicesModifyAction struct { AddBackingIndex *IndexAndDataStreamAction `json:"add_backing_index,omitempty"` RemoveBackingIndex *IndexAndDataStreamAction `json:"remove_backing_index,omitempty"` diff --git a/typedapi/types/indicesoptions.go b/typedapi/types/indicesoptions.go old mode 100755 new mode 100644 index a2dd98daad..4fe63c0f58 --- a/typedapi/types/indicesoptions.go +++ b/typedapi/types/indicesoptions.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/expandwildcard" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IndicesOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L297-L324 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L297-L324 type IndicesOptions struct { // AllowNoIndices If false, the request returns an error if any wildcard expression, index // alias, or `_all` value targets only @@ -47,6 +55,84 @@ type IndicesOptions struct { IgnoreUnavailable *bool `json:"ignore_unavailable,omitempty"` } +func (s *IndicesOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_no_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowNoIndices = &value + case bool: + s.AllowNoIndices = &v + } + + case "expand_wildcards": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := &expandwildcard.ExpandWildcard{} + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.ExpandWildcards = append(s.ExpandWildcards, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.ExpandWildcards); err != nil { + return err + } + } + + case "ignore_throttled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreThrottled = &value + case bool: + s.IgnoreThrottled = &v + } + + case "ignore_unavailable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnavailable = &value + case bool: + s.IgnoreUnavailable = &v + } + + } + } + return nil +} + // NewIndicesOptions returns a IndicesOptions. func NewIndicesOptions() *IndicesOptions { r := &IndicesOptions{} diff --git a/typedapi/types/indicesprivileges.go b/typedapi/types/indicesprivileges.go old mode 100755 new mode 100644 index 0bb1eb5986..c432f37daf --- a/typedapi/types/indicesprivileges.go +++ b/typedapi/types/indicesprivileges.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexprivilege" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // IndicesPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L81-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L82-L105 type IndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. Implicitly, restricted indices have limited @@ -36,7 +44,7 @@ type IndicesPrivileges struct { // `allow_restricted_indices`. 
AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` // FieldSecurity The document fields that the owners of the role have read access to. - FieldSecurity []FieldSecurity `json:"field_security,omitempty"` + FieldSecurity *FieldSecurity `json:"field_security,omitempty"` // Names A list of indices (or index name patterns) to which the permissions in this // entry apply. Names []string `json:"names"` @@ -49,6 +57,71 @@ type IndicesPrivileges struct { Query IndicesPrivilegesQuery `json:"query,omitempty"` } +func (s *IndicesPrivileges) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_restricted_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowRestrictedIndices = &value + case bool: + s.AllowRestrictedIndices = &v + } + + case "field_security": + if err := dec.Decode(&s.FieldSecurity); err != nil { + return err + } + + case "names": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Names = append(s.Names, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Names); err != nil { + return err + } + } + + case "privileges": + if err := dec.Decode(&s.Privileges); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + } + } + return nil +} + // NewIndicesPrivileges returns a IndicesPrivileges. func NewIndicesPrivileges() *IndicesPrivileges { r := &IndicesPrivileges{} diff --git a/typedapi/types/indicesprivilegesquery.go b/typedapi/types/indicesprivilegesquery.go old mode 100755 new mode 100644 index 52d850fb4b..a7aa1342bc --- a/typedapi/types/indicesprivilegesquery.go +++ b/typedapi/types/indicesprivilegesquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ package types // Query // RoleTemplateQuery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L130-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L131-L139 type IndicesPrivilegesQuery interface{} diff --git a/typedapi/types/indicesrecord.go b/typedapi/types/indicesrecord.go old mode 100755 new mode 100644 index 14f96a1ee6..6ec2b0a324 --- a/typedapi/types/indicesrecord.go +++ b/typedapi/types/indicesrecord.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // IndicesRecord type. 
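The long case lists in the IndicesRecord decoder that follows ("bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes", and so on) map the cat API's full column names, short aliases, and camelCase headers onto a single struct field. A trimmed sketch of that token-driven decoding, covering one column with an illustrative catRecord type:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

// catRecord is illustrative; the generated type has hundreds of columns.
type catRecord struct {
	DocsCount *string
}

func (r *catRecord) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		// full name, short alias, and camelCase header all land on DocsCount
		case "docs.count", "dc", "docsCount":
			var v string
			if err := dec.Decode(&v); err != nil {
				return err
			}
			r.DocsCount = &v
		}
	}
	return nil
}

func main() {
	for _, doc := range []string{`{"docs.count":"42"}`, `{"dc":"42"}`} {
		var r catRecord
		if err := json.Unmarshal([]byte(doc), &r); err != nil {
			panic(err)
		}
		fmt.Println(*r.DocsCount) // "42" from either key form
	}
}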
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/indices/types.ts#L20-L801 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/indices/types.ts#L20-L801 type IndicesRecord struct { // BulkAvgSizeInBytes average size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` @@ -310,6 +317,1154 @@ type IndicesRecord struct { WarmerTotalTime *string `json:"warmer.total_time,omitempty"` } +func (s *IndicesRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkAvgSizeInBytes = &o + + case "bulk.avg_time", "bati", "bulkAvgTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkAvgTime = &o + + case "bulk.total_operations", "bto", "bulkTotalOperation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalOperations = &o + + case "bulk.total_size_in_bytes", "btsi", "bulkTotalSizeInBytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalSizeInBytes = &o + + case "bulk.total_time", "btti", "bulkTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalTime = &o + + case "completion.size", "cs", "completionSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CompletionSize = &o + + case "creation.date", "cd": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreationDate = &o + + case "creation.date.string", "cds": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreationDateString = &o + + case "docs.count", "dc", "docsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocsCount = o + + case "docs.deleted", "dd", "docsDeleted": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocsDeleted = o + + case "fielddata.evictions", "fe", "fielddataEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FielddataEvictions = &o + + case "fielddata.memory_size", "fm", "fielddataMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FielddataMemorySize = &o + + case "flush.total", "ft", "flushTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FlushTotal = &o + + case "flush.total_time", "ftt", "flushTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FlushTotalTime = &o + + case "get.current", "gc", "getCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetCurrent = &o + + case "get.exists_time", "geti", 
"getExistsTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetExistsTime = &o + + case "get.exists_total", "geto", "getExistsTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetExistsTotal = &o + + case "get.missing_time", "gmti", "getMissingTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetMissingTime = &o + + case "get.missing_total", "gmto", "getMissingTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetMissingTotal = &o + + case "get.time", "gti", "getTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetTime = &o + + case "get.total", "gto", "getTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetTotal = &o + + case "health", "h": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Health = &o + + case "index", "i", "idx": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Index = &o + + case "indexing.delete_current", "idc", "indexingDeleteCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteCurrent = &o + + case "indexing.delete_time", "idti", "indexingDeleteTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteTime = &o + + case "indexing.delete_total", "idto", "indexingDeleteTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteTotal = &o + + case "indexing.index_current", "iic", "indexingIndexCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexCurrent = &o + + case "indexing.index_failed", "iif", "indexingIndexFailed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexFailed = &o + + case "indexing.index_time", "iiti", "indexingIndexTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexTime = &o + + case "indexing.index_total", "iito", "indexingIndexTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexTotal = &o + + case "memory.total", "tm", "memoryTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MemoryTotal = &o + + case "merges.current", "mc", "mergesCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrent = &o + + case "merges.current_docs", "mcd", "mergesCurrentDocs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrentDocs = &o + + case "merges.current_size", "mcs", "mergesCurrentSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrentSize = &o + + case "merges.total", "mt", "mergesTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil 
{ + return err + } + o := string(tmp) + s.MergesTotal = &o + + case "merges.total_docs", "mtd", "mergesTotalDocs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalDocs = &o + + case "merges.total_size", "mts", "mergesTotalSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalSize = &o + + case "merges.total_time", "mtt", "mergesTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalTime = &o + + case "pri", "p", "shards.primary", "shardsPrimary": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pri = &o + + case "pri.bulk.avg_size_in_bytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriBulkAvgSizeInBytes = &o + + case "pri.bulk.avg_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriBulkAvgTime = &o + + case "pri.bulk.total_operations": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriBulkTotalOperations = &o + + case "pri.bulk.total_size_in_bytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriBulkTotalSizeInBytes = &o + + case "pri.bulk.total_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriBulkTotalTime = &o + + case "pri.completion.size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriCompletionSize = &o + + case "pri.fielddata.evictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriFielddataEvictions = &o + + case "pri.fielddata.memory_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriFielddataMemorySize = &o + + case "pri.flush.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriFlushTotal = &o + + case "pri.flush.total_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriFlushTotalTime = &o + + case "pri.get.current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriGetCurrent = &o + + case "pri.get.exists_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriGetExistsTime = &o + + case "pri.get.exists_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriGetExistsTotal = &o + + case "pri.get.missing_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriGetMissingTime = &o + + case "pri.get.missing_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriGetMissingTotal = &o + + case "pri.get.time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriGetTime = &o + + case "pri.get.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + 
o := string(tmp) + s.PriGetTotal = &o + + case "pri.indexing.delete_current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingDeleteCurrent = &o + + case "pri.indexing.delete_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingDeleteTime = &o + + case "pri.indexing.delete_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingDeleteTotal = &o + + case "pri.indexing.index_current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingIndexCurrent = &o + + case "pri.indexing.index_failed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingIndexFailed = &o + + case "pri.indexing.index_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingIndexTime = &o + + case "pri.indexing.index_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriIndexingIndexTotal = &o + + case "pri.memory.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMemoryTotal = &o + + case "pri.merges.current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesCurrent = &o + + case "pri.merges.current_docs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesCurrentDocs = &o + + case "pri.merges.current_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesCurrentSize = &o + + case "pri.merges.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesTotal = &o + + case "pri.merges.total_docs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesTotalDocs = &o + + case "pri.merges.total_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesTotalSize = &o + + case "pri.merges.total_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriMergesTotalTime = &o + + case "pri.query_cache.evictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriQueryCacheEvictions = &o + + case "pri.query_cache.memory_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriQueryCacheMemorySize = &o + + case "pri.refresh.external_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRefreshExternalTime = &o + + case "pri.refresh.external_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRefreshExternalTotal = &o + + case "pri.refresh.listeners": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRefreshListeners = &o + + case "pri.refresh.time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + 
return err + } + o := string(tmp) + s.PriRefreshTime = &o + + case "pri.refresh.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRefreshTotal = &o + + case "pri.request_cache.evictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRequestCacheEvictions = &o + + case "pri.request_cache.hit_count": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRequestCacheHitCount = &o + + case "pri.request_cache.memory_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRequestCacheMemorySize = &o + + case "pri.request_cache.miss_count": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriRequestCacheMissCount = &o + + case "pri.search.fetch_current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchFetchCurrent = &o + + case "pri.search.fetch_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchFetchTime = &o + + case "pri.search.fetch_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchFetchTotal = &o + + case "pri.search.open_contexts": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchOpenContexts = &o + + case "pri.search.query_current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchQueryCurrent = &o + + case "pri.search.query_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchQueryTime = &o + + case "pri.search.query_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchQueryTotal = &o + + case "pri.search.scroll_current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchScrollCurrent = &o + + case "pri.search.scroll_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchScrollTime = &o + + case "pri.search.scroll_total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSearchScrollTotal = &o + + case "pri.segments.count": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSegmentsCount = &o + + case "pri.segments.fixed_bitset_memory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSegmentsFixedBitsetMemory = &o + + case "pri.segments.index_writer_memory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSegmentsIndexWriterMemory = &o + + case "pri.segments.memory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSegmentsMemory = &o + + case "pri.segments.version_map_memory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSegmentsVersionMapMemory = &o + + case 
"pri.store.size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriStoreSize = o + + case "pri.suggest.current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSuggestCurrent = &o + + case "pri.suggest.time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSuggestTime = &o + + case "pri.suggest.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriSuggestTotal = &o + + case "pri.warmer.current": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriWarmerCurrent = &o + + case "pri.warmer.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriWarmerTotal = &o + + case "pri.warmer.total_time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PriWarmerTotalTime = &o + + case "query_cache.evictions", "qce", "queryCacheEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheEvictions = &o + + case "query_cache.memory_size", "qcm", "queryCacheMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheMemorySize = &o + + case "refresh.external_time", "reti": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshExternalTime = &o + + case "refresh.external_total", "reto": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshExternalTotal = &o + + case "refresh.listeners", "rli", "refreshListeners": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshListeners = &o + + case "refresh.time", "rti", "refreshTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshTime = &o + + case "refresh.total", "rto", "refreshTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshTotal = &o + + case "rep", "r", "shards.replica", "shardsReplica": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Rep = &o + + case "request_cache.evictions", "rce", "requestCacheEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheEvictions = &o + + case "request_cache.hit_count", "rchc", "requestCacheHitCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheHitCount = &o + + case "request_cache.memory_size", "rcm", "requestCacheMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheMemorySize = &o + + case "request_cache.miss_count", "rcmc", "requestCacheMissCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheMissCount = &o + + case "search.fetch_current", "sfc", "searchFetchCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := 
string(tmp) + s.SearchFetchCurrent = &o + + case "search.fetch_time", "sfti", "searchFetchTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchTime = &o + + case "search.fetch_total", "sfto", "searchFetchTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchTotal = &o + + case "search.open_contexts", "so", "searchOpenContexts": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchOpenContexts = &o + + case "search.query_current", "sqc", "searchQueryCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryCurrent = &o + + case "search.query_time", "sqti", "searchQueryTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryTime = &o + + case "search.query_total", "sqto", "searchQueryTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryTotal = &o + + case "search.scroll_current", "scc", "searchScrollCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollCurrent = &o + + case "search.scroll_time", "scti", "searchScrollTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollTime = &o + + case "search.scroll_total", "scto", "searchScrollTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollTotal = &o + + case "search.throttled", "sth": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchThrottled = &o + + case "segments.count", "sc", "segmentsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsCount = &o + + case "segments.fixed_bitset_memory", "sfbm", "fixedBitsetMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsFixedBitsetMemory = &o + + case "segments.index_writer_memory", "siwm", "segmentsIndexWriterMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsIndexWriterMemory = &o + + case "segments.memory", "sm", "segmentsMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsMemory = &o + + case "segments.version_map_memory", "svmm", "segmentsVersionMapMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsVersionMapMemory = &o + + case "status", "s": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Status = &o + + case "store.size", "ss", "storeSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StoreSize = o + + case "suggest.current", "suc", "suggestCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SuggestCurrent = &o + + case "suggest.time", "suti", "suggestTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err 
+ } + o := string(tmp) + s.SuggestTime = &o + + case "suggest.total", "suto", "suggestTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SuggestTotal = &o + + case "uuid", "id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Uuid = &o + + case "warmer.current", "wc", "warmerCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WarmerCurrent = &o + + case "warmer.total", "wto", "warmerTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WarmerTotal = &o + + case "warmer.total_time", "wtt", "warmerTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WarmerTotalTime = &o + + } + } + return nil +} + // NewIndicesRecord returns a IndicesRecord. func NewIndicesRecord() *IndicesRecord { r := &IndicesRecord{} diff --git a/typedapi/types/indicesshardsstats.go b/typedapi/types/indicesshardsstats.go old mode 100755 new mode 100644 index 4fe8b18b15..14fb2f4c97 --- a/typedapi/types/indicesshardsstats.go +++ b/typedapi/types/indicesshardsstats.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IndicesShardsStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L49-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L49-L52 type IndicesShardsStats struct { AllFields FieldSummary `json:"all_fields"` Fields map[string]FieldSummary `json:"fields"` } +func (s *IndicesShardsStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "all_fields": + if err := dec.Decode(&s.AllFields); err != nil { + return err + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]FieldSummary, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + } + } + return nil +} + // NewIndicesShardsStats returns a IndicesShardsStats. func NewIndicesShardsStats() *IndicesShardsStats { r := &IndicesShardsStats{ diff --git a/typedapi/types/indicesshardstats.go b/typedapi/types/indicesshardstats.go old mode 100755 new mode 100644 index 2fb22d419e..4eb030df43 --- a/typedapi/types/indicesshardstats.go +++ b/typedapi/types/indicesshardstats.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // IndicesShardStats type. 
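// Illustrative sketch (not generated code): how the IndicesRecord decoder above is meant to be
// consumed. Each _cat column is matched by its canonical name or its short/camelCase alias, and
// the raw JSON token (quotes included for string values) is what lands in the string fields.
// The import path is taken from this diff; the sample row is invented for illustration.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "rto" and "qcm" are the short aliases for refresh.total and query_cache.memory_size.
	row := []byte(`{"health":"green","index":"logs-2024.01.01","store.size":"1.2gb","rto":3,"qcm":"0b"}`)

	var rec types.IndicesRecord
	if err := json.Unmarshal(row, &rec); err != nil {
		panic(err)
	}

	// String columns keep their surrounding quotes because the decoder captures the raw token
	// via json.RawMessage; numeric columns arrive as their textual form ("3").
	fmt.Println(*rec.Health, *rec.Index, rec.StoreSize, *rec.RefreshTotal, *rec.QueryCacheMemorySize)
}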
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L183-L211 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L183-L211 type IndicesShardStats struct { Bulk *BulkStats `json:"bulk,omitempty"` Commit *ShardCommit `json:"commit,omitempty"` @@ -56,6 +60,159 @@ type IndicesShardStats struct { Warmer *WarmerStats `json:"warmer,omitempty"` } +func (s *IndicesShardStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bulk": + if err := dec.Decode(&s.Bulk); err != nil { + return err + } + + case "commit": + if err := dec.Decode(&s.Commit); err != nil { + return err + } + + case "completion": + if err := dec.Decode(&s.Completion); err != nil { + return err + } + + case "docs": + if err := dec.Decode(&s.Docs); err != nil { + return err + } + + case "fielddata": + if err := dec.Decode(&s.Fielddata); err != nil { + return err + } + + case "flush": + if err := dec.Decode(&s.Flush); err != nil { + return err + } + + case "get": + if err := dec.Decode(&s.Get); err != nil { + return err + } + + case "indexing": + if err := dec.Decode(&s.Indexing); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "merges": + if err := dec.Decode(&s.Merges); err != nil { + return err + } + + case "query_cache": + if err := dec.Decode(&s.QueryCache); err != nil { + return err + } + + case "recovery": + if err := dec.Decode(&s.Recovery); err != nil { + return err + } + + case "refresh": + if err := dec.Decode(&s.Refresh); err != nil { + return err + } + + case "request_cache": + if err := dec.Decode(&s.RequestCache); err != nil { + return err + } + + case "retention_leases": + if err := dec.Decode(&s.RetentionLeases); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "search": + if err := dec.Decode(&s.Search); err != nil { + return err + } + + case "segments": + if err := dec.Decode(&s.Segments); err != nil { + return err + } + + case "seq_no": + if err := dec.Decode(&s.SeqNo); err != nil { + return err + } + + case "shard_path": + if err := dec.Decode(&s.ShardPath); err != nil { + return err + } + + case "shard_stats": + if err := dec.Decode(&s.ShardStats); err != nil { + return err + } + + case "shards": + if s.Shards == nil { + s.Shards = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "store": + if err := dec.Decode(&s.Store); err != nil { + return err + } + + case "translog": + if err := dec.Decode(&s.Translog); err != nil { + return err + } + + case "warmer": + if err := dec.Decode(&s.Warmer); err != nil { + return err + } + + } + } + return nil +} + // NewIndicesShardStats returns a IndicesShardStats. func NewIndicesShardStats() *IndicesShardStats { r := &IndicesShardStats{ diff --git a/typedapi/types/indicesshardstores.go b/typedapi/types/indicesshardstores.go old mode 100755 new mode 100644 index 3a3513dded..d9e1db5ca6 --- a/typedapi/types/indicesshardstores.go +++ b/typedapi/types/indicesshardstores.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IndicesShardStores type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L26-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L26-L28 type IndicesShardStores struct { Shards map[string]ShardStoreWrapper `json:"shards"` } diff --git a/typedapi/types/indicesstats.go b/typedapi/types/indicesstats.go old mode 100755 new mode 100644 index f20abaff51..8fee8d7bb4 --- a/typedapi/types/indicesstats.go +++ b/typedapi/types/indicesstats.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexmetadatastate" + + "bytes" + "errors" + "io" + + "encoding/json" ) // IndicesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L92-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L92-L101 type IndicesStats struct { Health *healthstatus.HealthStatus `json:"health,omitempty"` Primaries *IndexStats `json:"primaries,omitempty"` @@ -37,6 +43,59 @@ type IndicesStats struct { Uuid *string `json:"uuid,omitempty"` } +func (s *IndicesStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "health": + if err := dec.Decode(&s.Health); err != nil { + return err + } + + case "primaries": + if err := dec.Decode(&s.Primaries); err != nil { + return err + } + + case "shards": + if s.Shards == nil { + s.Shards = make(map[string][]IndicesShardStats, 0) + } + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return err + } + + } + } + return nil +} + // NewIndicesStats returns a IndicesStats. func NewIndicesStats() *IndicesStats { r := &IndicesStats{ diff --git a/typedapi/types/indicesvalidationexplanation.go b/typedapi/types/indicesvalidationexplanation.go old mode 100755 new mode 100644 index 431274ceae..1b4f9770a9 --- a/typedapi/types/indicesvalidationexplanation.go +++ b/typedapi/types/indicesvalidationexplanation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
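// Illustrative sketch (not generated code): the IndicesStats decoder above allocates the shards
// map on first use and then delegates to the per-shard IndicesShardStats decoder, so nested
// stats documents decode without any manual map initialisation. The payload below is invented;
// the import path comes from this diff.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{
		"uuid": "n6gzFZTgS664GUfx0Xrpjw",
		"shards": {
			"0": [ { "docs": { "count": 42, "deleted": 0 } } ]
		}
	}`)

	var stats types.IndicesStats
	if err := json.Unmarshal(payload, &stats); err != nil {
		panic(err)
	}

	// The "shards" case lazily builds map[string][]IndicesShardStats before decoding into it.
	fmt.Println(*stats.Uuid, len(stats.Shards["0"]))
}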
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndicesValidationExplanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L32-L37 type IndicesValidationExplanation struct { Error *string `json:"error,omitempty"` Explanation *string `json:"explanation,omitempty"` @@ -30,6 +40,61 @@ type IndicesValidationExplanation struct { Valid bool `json:"valid"` } +func (s *IndicesValidationExplanation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "error": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Error = &o + + case "explanation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Explanation = &o + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "valid": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Valid = value + case bool: + s.Valid = v + } + + } + } + return nil +} + // NewIndicesValidationExplanation returns a IndicesValidationExplanation. func NewIndicesValidationExplanation() *IndicesValidationExplanation { r := &IndicesValidationExplanation{} diff --git a/typedapi/types/indicesversions.go b/typedapi/types/indicesversions.go old mode 100755 new mode 100644 index e6cbbe0a8f..b3dcc3935a --- a/typedapi/types/indicesversions.go +++ b/typedapi/types/indicesversions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IndicesVersions type. 
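// Illustrative sketch (not generated code): the "valid" case in the IndicesValidationExplanation
// decoder above tolerates both a JSON boolean and its string form, falling back to
// strconv.ParseBool. The payloads are invented; the import path comes from this diff.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var a, b types.IndicesValidationExplanation

	// Boolean supplied as a JSON string.
	if err := json.Unmarshal([]byte(`{"valid":"true","index":"logs"}`), &a); err != nil {
		panic(err)
	}
	// Boolean supplied natively.
	if err := json.Unmarshal([]byte(`{"valid":false,"index":"logs"}`), &b); err != nil {
		panic(err)
	}

	fmt.Println(a.Valid, b.Valid) // true false
}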
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L144-L149 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L144-L149 type IndicesVersions struct { IndexCount int `json:"index_count"` PrimaryShardCount int `json:"primary_shard_count"` @@ -30,6 +40,78 @@ type IndicesVersions struct { Version string `json:"version"` } +func (s *IndicesVersions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IndexCount = value + case float64: + f := int(v) + s.IndexCount = f + } + + case "primary_shard_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrimaryShardCount = value + case float64: + f := int(v) + s.PrimaryShardCount = f + } + + case "total_primary_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalPrimaryBytes = value + case float64: + f := int64(v) + s.TotalPrimaryBytes = f + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIndicesVersions returns a IndicesVersions. func NewIndicesVersions() *IndicesVersions { r := &IndicesVersions{} diff --git a/typedapi/types/inferenceaggregate.go b/typedapi/types/inferenceaggregate.go old mode 100755 new mode 100644 index 6ae3a3bfe8..c33316e69a --- a/typedapi/types/inferenceaggregate.go +++ b/typedapi/types/inferenceaggregate.go @@ -16,27 +16,93 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // InferenceAggregate type. 
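// Illustrative sketch (not generated code): index_count, primary_shard_count and
// total_primary_bytes in the IndicesVersions decoder above accept either JSON numbers or quoted
// numbers, via the strconv fallbacks. Values below are invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"index_count":"7","primary_shard_count":3,"total_primary_bytes":"1048576","version":"8.7.0"}`)

	var v types.IndicesVersions
	if err := json.Unmarshal(payload, &v); err != nil {
		panic(err)
	}

	// Quoted and unquoted numbers land in the same int/int64 fields.
	fmt.Println(v.IndexCount, v.PrimaryShardCount, v.TotalPrimaryBytes, v.Version) // 7 3 1048576 8.7.0
}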
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L650-L661 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L659-L670 type InferenceAggregate struct { Data map[string]json.RawMessage `json:"-"` FeatureImportance []InferenceFeatureImportance `json:"feature_importance,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` TopClasses []InferenceTopClassEntry `json:"top_classes,omitempty"` Value FieldValue `json:"value,omitempty"` Warning *string `json:"warning,omitempty"` } +func (s *InferenceAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_importance": + if err := dec.Decode(&s.FeatureImportance); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "top_classes": + if err := dec.Decode(&s.TopClasses); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "warning": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Warning = &o + + default: + + if key, ok := t.(string); ok { + if s.Data == nil { + s.Data = make(map[string]json.RawMessage, 0) + } + raw := new(json.RawMessage) + if err := dec.Decode(&raw); err != nil { + return err + } + s.Data[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s InferenceAggregate) MarshalJSON() ([]byte, error) { type opt InferenceAggregate @@ -56,6 +122,7 @@ func (s InferenceAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Data { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Data") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/inferenceaggregation.go b/typedapi/types/inferenceaggregation.go old mode 100755 new mode 100644 index 3c79af779d..803ab976ec --- a/typedapi/types/inferenceaggregation.go +++ b/typedapi/types/inferenceaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,19 +32,20 @@ import ( // InferenceAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L171-L174 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L171-L174 type InferenceAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
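// Illustrative sketch (not generated code): the InferenceAggregate decoder above routes unknown
// keys into the Data map, and the matching MarshalJSON now drops the synthetic "Data" key when
// flattening those extras back out; meta is decoded into the shared Metadata type instead of a
// plain map. The payload below is invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"warning":"model was cold","some_extra_key":{"anything":"goes"}}`)

	var agg types.InferenceAggregate
	if err := json.Unmarshal(payload, &agg); err != nil {
		panic(err)
	}

	// Known keys fill the typed fields; everything else is kept verbatim in Data.
	fmt.Println(*agg.Warning)                       // raw token, quotes included
	fmt.Println(string(agg.Data["some_extra_key"])) // {"anything":"goes"}
}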
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - InferenceConfig *InferenceConfigContainer `json:"inference_config,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - ModelId string `json:"model_id"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + InferenceConfig *InferenceConfigContainer `json:"inference_config,omitempty"` + Meta Metadata `json:"meta,omitempty"` + ModelId string `json:"model_id"` + Name *string `json:"name,omitempty"` } func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,9 +65,12 @@ func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -89,9 +93,12 @@ func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/inferenceclassimportance.go b/typedapi/types/inferenceclassimportance.go old mode 100755 new mode 100644 index f5b1b44946..9386bfefb3 --- a/typedapi/types/inferenceclassimportance.go +++ b/typedapi/types/inferenceclassimportance.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InferenceClassImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L675-L678 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L684-L687 type InferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` } +func (s *InferenceClassImportance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClassName = o + + case "importance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Importance = f + case float64: + f := Float64(v) + s.Importance = f + } + + } + } + return nil +} + // NewInferenceClassImportance returns a InferenceClassImportance. 
func NewInferenceClassImportance() *InferenceClassImportance { r := &InferenceClassImportance{} diff --git a/typedapi/types/inferenceconfig.go b/typedapi/types/inferenceconfig.go old mode 100755 new mode 100644 index fcaaa96c8f..5e34dd515d --- a/typedapi/types/inferenceconfig.go +++ b/typedapi/types/inferenceconfig.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // InferenceConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L244-L250 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L244-L250 type InferenceConfig struct { Classification *InferenceConfigClassification `json:"classification,omitempty"` Regression *InferenceConfigRegression `json:"regression,omitempty"` diff --git a/typedapi/types/inferenceconfigclassification.go b/typedapi/types/inferenceconfigclassification.go old mode 100755 new mode 100644 index de7aa8a32b..2d529fc978 --- a/typedapi/types/inferenceconfigclassification.go +++ b/typedapi/types/inferenceconfigclassification.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InferenceConfigClassification type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L257-L263 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L257-L263 type InferenceConfigClassification struct { NumTopClasses *int `json:"num_top_classes,omitempty"` NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` @@ -31,6 +41,76 @@ type InferenceConfigClassification struct { TopClassesResultsField *string `json:"top_classes_results_field,omitempty"` } +func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "prediction_field_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PredictionFieldType = &o + + case "results_field": + if err := dec.Decode(&s.ResultsField); err != nil { + return err + } + + case "top_classes_results_field": + if err := dec.Decode(&s.TopClassesResultsField); err != nil { + return err + } + + } + } + return nil +} + // NewInferenceConfigClassification returns a InferenceConfigClassification. func NewInferenceConfigClassification() *InferenceConfigClassification { r := &InferenceConfigClassification{} diff --git a/typedapi/types/inferenceconfigcontainer.go b/typedapi/types/inferenceconfigcontainer.go old mode 100755 new mode 100644 index f5c4aa4979..f4c1f23aef --- a/typedapi/types/inferenceconfigcontainer.go +++ b/typedapi/types/inferenceconfigcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // InferenceConfigContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L176-L182 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L176-L182 type InferenceConfigContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigcreatecontainer.go b/typedapi/types/inferenceconfigcreatecontainer.go old mode 100755 new mode 100644 index 120da966ff..eb6e96af8f --- a/typedapi/types/inferenceconfigcreatecontainer.go +++ b/typedapi/types/inferenceconfigcreatecontainer.go @@ -16,13 +16,13 @@ // under the License. 
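// Illustrative sketch (not generated code): in the InferenceConfigClassification decoder above,
// num_top_classes and num_top_feature_importance_values accept plain or quoted numbers, and
// prediction_field_type keeps the raw token text. The configuration values are invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	cfg := []byte(`{"num_top_classes":"3","num_top_feature_importance_values":2,"prediction_field_type":"string"}`)

	var c types.InferenceConfigClassification
	if err := json.Unmarshal(cfg, &c); err != nil {
		panic(err)
	}

	fmt.Println(*c.NumTopClasses, *c.NumTopFeatureImportanceValues, *c.PredictionFieldType)
}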
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // InferenceConfigCreateContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L23-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L23-L67 type InferenceConfigCreateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigregression.go b/typedapi/types/inferenceconfigregression.go old mode 100755 new mode 100644 index 819b959247..eaece1f6fc --- a/typedapi/types/inferenceconfigregression.go +++ b/typedapi/types/inferenceconfigregression.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InferenceConfigRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L252-L255 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L252-L255 type InferenceConfigRegression struct { NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` ResultsField *string `json:"results_field,omitempty"` } +func (s *InferenceConfigRegression) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "results_field": + if err := dec.Decode(&s.ResultsField); err != nil { + return err + } + + } + } + return nil +} + // NewInferenceConfigRegression returns a InferenceConfigRegression. func NewInferenceConfigRegression() *InferenceConfigRegression { r := &InferenceConfigRegression{} diff --git a/typedapi/types/inferenceconfigupdatecontainer.go b/typedapi/types/inferenceconfigupdatecontainer.go old mode 100755 new mode 100644 index 58cb4b6499..829ce40812 --- a/typedapi/types/inferenceconfigupdatecontainer.go +++ b/typedapi/types/inferenceconfigupdatecontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // InferenceConfigUpdateContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L265-L285 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L265-L285 type InferenceConfigUpdateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferencefeatureimportance.go b/typedapi/types/inferencefeatureimportance.go old mode 100755 new mode 100644 index 3d52c27094..dc0629892b --- a/typedapi/types/inferencefeatureimportance.go +++ b/typedapi/types/inferencefeatureimportance.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InferenceFeatureImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L669-L673 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L678-L682 type InferenceFeatureImportance struct { Classes []InferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` Importance *Float64 `json:"importance,omitempty"` } +func (s *InferenceFeatureImportance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + case "feature_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureName = o + + case "importance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Importance = &f + case float64: + f := Float64(v) + s.Importance = &f + } + + } + } + return nil +} + // NewInferenceFeatureImportance returns a InferenceFeatureImportance. func NewInferenceFeatureImportance() *InferenceFeatureImportance { r := &InferenceFeatureImportance{} diff --git a/typedapi/types/inferenceprocessor.go b/typedapi/types/inferenceprocessor.go old mode 100755 new mode 100644 index 9d78eaf177..061f9e8be5 --- a/typedapi/types/inferenceprocessor.go +++ b/typedapi/types/inferenceprocessor.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // InferenceProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L237-L242 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L237-L242 type InferenceProcessor struct { Description *string `json:"description,omitempty"` FieldMap map[string]json.RawMessage `json:"field_map,omitempty"` @@ -39,6 +45,92 @@ type InferenceProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field_map": + if s.FieldMap == nil { + s.FieldMap = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.FieldMap); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "inference_config": + if err := dec.Decode(&s.InferenceConfig); err != nil { + return err + } + + case "model_id": + if err := dec.Decode(&s.ModelId); err != nil { + return err + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewInferenceProcessor returns a InferenceProcessor. func NewInferenceProcessor() *InferenceProcessor { r := &InferenceProcessor{ diff --git a/typedapi/types/inferenceresponseresult.go b/typedapi/types/inferenceresponseresult.go old mode 100755 new mode 100644 index 1df5cfcf2c..d69e1fd03d --- a/typedapi/types/inferenceresponseresult.go +++ b/typedapi/types/inferenceresponseresult.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InferenceResponseResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L418-L465 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L418-L465 type InferenceResponseResult struct { // Entities If the model is trained for named entity recognition (NER) tasks, the // response contains the recognized entities. 
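// Illustrative sketch (not generated code): the InferenceProcessor decoder above tolerates
// ignore_failure expressed as a JSON boolean or as the strings "true"/"false", and lazily
// allocates field_map. The pipeline fragment below is invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	fragment := []byte(`{"model_id":"my-elser-model","ignore_failure":"true","field_map":{"message":"text_field"}}`)

	var p types.InferenceProcessor
	if err := json.Unmarshal(fragment, &p); err != nil {
		panic(err)
	}

	// field_map entries stay as raw JSON; ignore_failure was parsed from its string form.
	fmt.Println(p.ModelId, *p.IgnoreFailure, string(p.FieldMap["message"]))
}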
@@ -64,6 +74,108 @@ type InferenceResponseResult struct { Warning *string `json:"warning,omitempty"` } +func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "entities": + if err := dec.Decode(&s.Entities); err != nil { + return err + } + + case "feature_importance": + if err := dec.Decode(&s.FeatureImportance); err != nil { + return err + } + + case "is_truncated": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsTruncated = &value + case bool: + s.IsTruncated = &v + } + + case "predicted_value": + if err := dec.Decode(&s.PredictedValue); err != nil { + return err + } + + case "predicted_value_sequence": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PredictedValueSequence = &o + + case "prediction_probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.PredictionProbability = &f + case float64: + f := Float64(v) + s.PredictionProbability = &f + } + + case "prediction_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.PredictionScore = &f + case float64: + f := Float64(v) + s.PredictionScore = &f + } + + case "top_classes": + if err := dec.Decode(&s.TopClasses); err != nil { + return err + } + + case "warning": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Warning = &o + + } + } + return nil +} + // NewInferenceResponseResult returns a InferenceResponseResult. func NewInferenceResponseResult() *InferenceResponseResult { r := &InferenceResponseResult{} diff --git a/typedapi/types/inferencetopclassentry.go b/typedapi/types/inferencetopclassentry.go old mode 100755 new mode 100644 index eb54a12d51..b11f7a7a22 --- a/typedapi/types/inferencetopclassentry.go +++ b/typedapi/types/inferencetopclassentry.go @@ -16,19 +16,86 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InferenceTopClassEntry type. 
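// Illustrative sketch (not generated code): in the InferenceResponseResult decoder above,
// prediction_probability and prediction_score accept quoted or plain numbers, and is_truncated
// accepts a boolean or its string form. The response body below is invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	body := []byte(`{"prediction_probability":"0.9876","prediction_score":0.42,"is_truncated":false}`)

	var res types.InferenceResponseResult
	if err := json.Unmarshal(body, &res); err != nil {
		panic(err)
	}

	fmt.Println(*res.PredictionProbability, *res.PredictionScore, *res.IsTruncated)
}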
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L663-L667 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L672-L676 type InferenceTopClassEntry struct { ClassName FieldValue `json:"class_name"` ClassProbability Float64 `json:"class_probability"` ClassScore Float64 `json:"class_score"` } +func (s *InferenceTopClassEntry) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + if err := dec.Decode(&s.ClassName); err != nil { + return err + } + + case "class_probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ClassProbability = f + case float64: + f := Float64(v) + s.ClassProbability = f + } + + case "class_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ClassScore = f + case float64: + f := Float64(v) + s.ClassScore = f + } + + } + } + return nil +} + // NewInferenceTopClassEntry returns a InferenceTopClassEntry. func NewInferenceTopClassEntry() *InferenceTopClassEntry { r := &InferenceTopClassEntry{} diff --git a/typedapi/types/influence.go b/typedapi/types/influence.go old mode 100755 new mode 100644 index 9bac7906db..bdbbe2ac6c --- a/typedapi/types/influence.go +++ b/typedapi/types/influence.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Influence type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Anomaly.ts#L140-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Anomaly.ts#L140-L143 type Influence struct { InfluencerFieldName string `json:"influencer_field_name"` InfluencerFieldValues []string `json:"influencer_field_values"` diff --git a/typedapi/types/influencer.go b/typedapi/types/influencer.go old mode 100755 new mode 100644 index dcdf6a148b..dc3c7bc49f --- a/typedapi/types/influencer.go +++ b/typedapi/types/influencer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Influencer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Influencer.ts#L31-L83 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Influencer.ts#L31-L83 type Influencer struct { // BucketSpan The length of the bucket in seconds. This value matches the bucket span that // is specified in the job. @@ -65,6 +75,132 @@ type Influencer struct { Timestamp int64 `json:"timestamp"` } +func (s *Influencer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bucket_span": + if err := dec.Decode(&s.BucketSpan); err != nil { + return err + } + + case "foo": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Foo = &o + + case "influencer_field_name": + if err := dec.Decode(&s.InfluencerFieldName); err != nil { + return err + } + + case "influencer_field_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.InfluencerFieldValue = o + + case "influencer_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.InfluencerScore = f + case float64: + f := Float64(v) + s.InfluencerScore = f + } + + case "initial_influencer_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.InitialInfluencerScore = f + case float64: + f := Float64(v) + s.InitialInfluencerScore = f + } + + case "is_interim": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsInterim = value + case bool: + s.IsInterim = v + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Probability = f + case float64: + f := Float64(v) + s.Probability = f + } + + case "result_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewInfluencer returns a Influencer. func NewInfluencer() *Influencer { r := &Influencer{} diff --git a/typedapi/types/infofeaturestate.go b/typedapi/types/infofeaturestate.go old mode 100755 new mode 100644 index 196bf98976..84503fddb0 --- a/typedapi/types/infofeaturestate.go +++ b/typedapi/types/infofeaturestate.go @@ -16,18 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // InfoFeatureState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotInfoFeatureState.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotInfoFeatureState.ts#L22-L25 type InfoFeatureState struct { FeatureName string `json:"feature_name"` Indices []string `json:"indices"` } +func (s *InfoFeatureState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "feature_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureName = o + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + } + } + return nil +} + // NewInfoFeatureState returns a InfoFeatureState. func NewInfoFeatureState() *InfoFeatureState { r := &InfoFeatureState{} diff --git a/typedapi/types/ingestpipeline.go b/typedapi/types/ingestpipeline.go old mode 100755 new mode 100644 index 66f6cb669e..b805cd2827 --- a/typedapi/types/ingestpipeline.go +++ b/typedapi/types/ingestpipeline.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IngestPipeline type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Pipeline.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Pipeline.ts#L23-L28 type IngestPipeline struct { Description *string `json:"description,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` @@ -30,6 +38,49 @@ type IngestPipeline struct { Version *int64 `json:"version,omitempty"` } +func (s *IngestPipeline) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "processors": + if err := dec.Decode(&s.Processors); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewIngestPipeline returns a IngestPipeline. 
func NewIngestPipeline() *IngestPipeline { r := &IngestPipeline{} diff --git a/typedapi/types/ingesttotal.go b/typedapi/types/ingesttotal.go old mode 100755 new mode 100644 index d226389327..250b4669ae --- a/typedapi/types/ingesttotal.go +++ b/typedapi/types/ingesttotal.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IngestTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L149-L155 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L149-L155 type IngestTotal struct { Count *int64 `json:"count,omitempty"` Current *int64 `json:"current,omitempty"` @@ -31,6 +41,81 @@ type IngestTotal struct { TimeInMillis *int64 `json:"time_in_millis,omitempty"` } +func (s *IngestTotal) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Current = &value + case float64: + f := int64(v) + s.Current = &f + } + + case "failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Failed = &value + case float64: + f := int64(v) + s.Failed = &f + } + + case "processors": + if err := dec.Decode(&s.Processors); err != nil { + return err + } + + case "time_in_millis": + if err := dec.Decode(&s.TimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewIngestTotal returns a IngestTotal. func NewIngestTotal() *IngestTotal { r := &IngestTotal{} diff --git a/typedapi/types/inlineget.go b/typedapi/types/inlineget.go old mode 100755 new mode 100644 index 844301dfac..fc0c5976bb --- a/typedapi/types/inlineget.go +++ b/typedapi/types/inlineget.go @@ -16,18 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // InlineGet type. 
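A sketch (an assumption, not part of the generated code) of the "single value or array" normalisation in the InfoFeatureState hunk above: `indices` may be sent as one index name or as a list, and both shapes end up in the same []string field.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var one, many types.InfoFeatureState
	if err := json.Unmarshal([]byte(`{"indices":".geoip_databases"}`), &one); err != nil {
		panic(err)
	}
	if err := json.Unmarshal([]byte(`{"indices":[".geoip_databases","other-index"]}`), &many); err != nil {
		panic(err)
	}
	fmt.Println(one.Indices)  // [.geoip_databases]
	fmt.Println(many.Indices) // [.geoip_databases other-index]
}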
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L286-L295 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L286-L295 type InlineGet struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -38,6 +45,91 @@ type InlineGet struct { Source_ json.RawMessage `json:"_source,omitempty"` } +func (s *InlineGet) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "found": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Found = value + case bool: + s.Found = v + } + + case "_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryTerm_ = &value + case float64: + f := int64(v) + s.PrimaryTerm_ = &f + } + + case "_routing": + if err := dec.Decode(&s.Routing_); err != nil { + return err + } + + case "_seq_no": + if err := dec.Decode(&s.SeqNo_); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + default: + + if key, ok := t.(string); ok { + if s.Metadata == nil { + s.Metadata = make(map[string]json.RawMessage, 0) + } + raw := new(json.RawMessage) + if err := dec.Decode(&raw); err != nil { + return err + } + s.Metadata[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s InlineGet) MarshalJSON() ([]byte, error) { type opt InlineGet @@ -57,6 +149,7 @@ func (s InlineGet) MarshalJSON() ([]byte, error) { for key, value := range s.Metadata { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Metadata") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/inlinegetdictuserdefined.go b/typedapi/types/inlinegetdictuserdefined.go old mode 100755 new mode 100644 index a551a39428..086006c200 --- a/typedapi/types/inlinegetdictuserdefined.go +++ b/typedapi/types/inlinegetdictuserdefined.go @@ -16,18 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // InlineGetDictUserDefined type. 
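A hypothetical round-trip sketch for the InlineGet hunk above: unknown top-level keys are collected into the Metadata map on decode, and the added delete(tmp, "Metadata") keeps that internal map from being re-emitted as a literal "Metadata" key when the value is marshalled again.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"found":true,"_index":"my-index","_id":"1"}`)

	var ig types.InlineGet
	if err := json.Unmarshal(raw, &ig); err != nil {
		panic(err)
	}
	fmt.Println(string(ig.Metadata["_index"])) // "my-index"

	out, err := json.Marshal(ig)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // _index and _id are back at the top level; no "Metadata" key
}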
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L286-L295 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L286-L295 type InlineGetDictUserDefined struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -38,6 +45,94 @@ type InlineGetDictUserDefined struct { Source_ map[string]json.RawMessage `json:"_source"` } +func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "found": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Found = value + case bool: + s.Found = v + } + + case "_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryTerm_ = &value + case float64: + f := int64(v) + s.PrimaryTerm_ = &f + } + + case "_routing": + if err := dec.Decode(&s.Routing_); err != nil { + return err + } + + case "_seq_no": + if err := dec.Decode(&s.SeqNo_); err != nil { + return err + } + + case "_source": + if s.Source_ == nil { + s.Source_ = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + default: + + if key, ok := t.(string); ok { + if s.InlineGetDictUserDefined == nil { + s.InlineGetDictUserDefined = make(map[string]json.RawMessage, 0) + } + raw := new(json.RawMessage) + if err := dec.Decode(&raw); err != nil { + return err + } + s.InlineGetDictUserDefined[key] = *raw + } + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s InlineGetDictUserDefined) MarshalJSON() ([]byte, error) { type opt InlineGetDictUserDefined @@ -57,6 +152,7 @@ func (s InlineGetDictUserDefined) MarshalJSON() ([]byte, error) { for key, value := range s.InlineGetDictUserDefined { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "InlineGetDictUserDefined") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/inlinescript.go b/typedapi/types/inlinescript.go old mode 100755 new mode 100644 index 5c9f45972a..ec7cd1e2c3 --- a/typedapi/types/inlinescript.go +++ b/typedapi/types/inlinescript.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/scriptlanguage" + + "bytes" + "errors" + "io" + + "encoding/json" ) // InlineScript type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Scripting.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Scripting.ts#L45-L50 type InlineScript struct { Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` Options map[string]string `json:"options,omitempty"` @@ -36,6 +40,60 @@ type InlineScript struct { Source string `json:"source"` } +func (s *InlineScript) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Source) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "lang": + if err := dec.Decode(&s.Lang); err != nil { + return err + } + + case "options": + if s.Options == nil { + s.Options = make(map[string]string, 0) + } + if err := dec.Decode(&s.Options); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "source": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Source = o + + } + } + return nil +} + // NewInlineScript returns a InlineScript. func NewInlineScript() *InlineScript { r := &InlineScript{ diff --git a/typedapi/types/innerhits.go b/typedapi/types/innerhits.go old mode 100755 new mode 100644 index 8ff67167b8..f46d121f98 --- a/typedapi/types/innerhits.go +++ b/typedapi/types/innerhits.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // InnerHits type. 
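A sketch (assumption, not part of the diff) of the shorthand handling added to InlineScript above: a bare JSON string is now accepted wherever a full script object could appear, and is stored as the script source.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var script types.InlineScript
	if err := json.Unmarshal([]byte(`"ctx._source.counter += 1"`), &script); err != nil {
		panic(err)
	}
	fmt.Println(script.Source) // ctx._source.counter += 1
}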
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L106-L124 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L106-L124 type InnerHits struct { Collapse *FieldCollapse `json:"collapse,omitempty"` DocvalueFields []FieldAndFormat `json:"docvalue_fields,omitempty"` @@ -42,6 +52,209 @@ type InnerHits struct { Version *bool `json:"version,omitempty"` } +func (s *InnerHits) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collapse": + if err := dec.Decode(&s.Collapse); err != nil { + return err + } + + case "docvalue_fields": + if err := dec.Decode(&s.DocvalueFields); err != nil { + return err + } + + case "explain": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Explain = &value + case bool: + s.Explain = &v + } + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + case "from": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.From = &value + case float64: + f := int(v) + s.From = &f + } + + case "highlight": + if err := dec.Decode(&s.Highlight); err != nil { + return err + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "script_fields": + if s.ScriptFields == nil { + s.ScriptFields = make(map[string]ScriptField, 0) + } + if err := dec.Decode(&s.ScriptFields); err != nil { + return err + } + + case "seq_no_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SeqNoPrimaryTerm = &value + case bool: + s.SeqNoPrimaryTerm = &v + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return err + } + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + case "stored_field": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := 
json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.StoredField = append(s.StoredField, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredField); err != nil { + return err + } + } + + case "track_scores": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TrackScores = &value + case bool: + s.TrackScores = &v + } + + case "version": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Version = &value + case bool: + s.Version = &v + } + + } + } + return nil +} + // NewInnerHits returns a InnerHits. func NewInnerHits() *InnerHits { r := &InnerHits{ diff --git a/typedapi/types/innerhitsresult.go b/typedapi/types/innerhitsresult.go old mode 100755 new mode 100644 index f3c0a6ea58..3ac19cfad4 --- a/typedapi/types/innerhitsresult.go +++ b/typedapi/types/innerhitsresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // InnerHitsResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L84-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L84-L86 type InnerHitsResult struct { Hits *HitsMetadata `json:"hits,omitempty"` } diff --git a/typedapi/types/inprogress.go b/typedapi/types/inprogress.go old mode 100755 new mode 100644 index 42087038b5..53dc54d48d --- a/typedapi/types/inprogress.go +++ b/typedapi/types/inprogress.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // InProgress type. 
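A sketch (assumption) of the lenient InnerHits decoding above: numeric options given as strings and a single field name instead of a list both decode into the typed struct.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"size":"3","fields":"comments.author","ignore_unmapped":"true"}`)

	var ih types.InnerHits
	if err := json.Unmarshal(raw, &ih); err != nil {
		panic(err)
	}
	fmt.Println(*ih.Size, ih.Fields, *ih.IgnoreUnmapped) // 3 [comments.author] true
}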
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L131-L136 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L131-L136 type InProgress struct { Name string `json:"name"` StartTimeMillis int64 `json:"start_time_millis"` @@ -30,6 +38,49 @@ type InProgress struct { Uuid string `json:"uuid"` } +func (s *InProgress) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "start_time_millis": + if err := dec.Decode(&s.StartTimeMillis); err != nil { + return err + } + + case "state": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = o + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return err + } + + } + } + return nil +} + // NewInProgress returns a InProgress. func NewInProgress() *InProgress { r := &InProgress{} diff --git a/typedapi/types/input.go b/typedapi/types/input.go old mode 100755 new mode 100644 index 55b94de833..1ffbdd74b4 --- a/typedapi/types/input.go +++ b/typedapi/types/input.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Input type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L56-L58 type Input struct { FieldNames []string `json:"field_names"` } +func (s *Input) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field_names": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.FieldNames = append(s.FieldNames, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.FieldNames); err != nil { + return err + } + } + + } + } + return nil +} + // NewInput returns a Input. func NewInput() *Input { r := &Input{} diff --git a/typedapi/types/integernumberproperty.go b/typedapi/types/integernumberproperty.go old mode 100755 new mode 100644 index ce4cf009c2..ee739922b3 --- a/typedapi/types/integernumberproperty.go +++ b/typedapi/types/integernumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // IntegerNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L146-L149 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L146-L149 type IntegerNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type IntegerNumberProperty struct { } func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,35 +435,78 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := 
dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NullValue = &value + case float64: + f := int(v) + s.NullValue = &f } case "on_script_error": @@ -426,6 +515,9 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +805,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +817,39 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/integerrangeproperty.go b/typedapi/types/integerrangeproperty.go old mode 100755 new mode 100644 index 6cd361acda..d2d9dfb7e6 --- a/typedapi/types/integerrangeproperty.go +++ b/typedapi/types/integerrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // IntegerRangeProperty type. 
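A sketch (assumption, not part of the generated code) of the IntegerNumberProperty changes above: mapping options that arrive as strings ("index", "null_value") and a scalar "copy_to" all decode into the typed fields.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"type":"integer","index":"true","copy_to":"other_field","null_value":"0"}`)

	var p types.IntegerNumberProperty
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Println(*p.Index, p.CopyTo, *p.NullValue) // true [other_field] 0
}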
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/range.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/range.ts#L42-L44 type IntegerRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -51,6 +53,7 @@ type IntegerRangeProperty struct { } func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,23 +68,63 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -90,6 +133,9 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -377,28 +423,54 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +758,32 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { 
} s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/intervals.go b/typedapi/types/intervals.go old mode 100755 new mode 100644 index e4b3711b5f..981b0b9adf --- a/typedapi/types/intervals.go +++ b/typedapi/types/intervals.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Intervals type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L63-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L63-L72 type Intervals struct { AllOf *IntervalsAllOf `json:"all_of,omitempty"` AnyOf *IntervalsAnyOf `json:"any_of,omitempty"` diff --git a/typedapi/types/intervalsallof.go b/typedapi/types/intervalsallof.go old mode 100755 new mode 100644 index a8d953c63a..e985104e3e --- a/typedapi/types/intervalsallof.go +++ b/typedapi/types/intervalsallof.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IntervalsAllOf type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L49-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L49-L56 type IntervalsAllOf struct { Filter *IntervalsFilter `json:"filter,omitempty"` Intervals []Intervals `json:"intervals"` @@ -30,6 +40,66 @@ type IntervalsAllOf struct { Ordered *bool `json:"ordered,omitempty"` } +func (s *IntervalsAllOf) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "intervals": + if err := dec.Decode(&s.Intervals); err != nil { + return err + } + + case "max_gaps": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxGaps = &value + case float64: + f := int(v) + s.MaxGaps = &f + } + + case "ordered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Ordered = &value + case bool: + s.Ordered = &v + } + + } + } + return nil +} + // NewIntervalsAllOf returns a IntervalsAllOf. func NewIntervalsAllOf() *IntervalsAllOf { r := &IntervalsAllOf{} diff --git a/typedapi/types/intervalsanyof.go b/typedapi/types/intervalsanyof.go old mode 100755 new mode 100644 index ebff4bcb8f..876d7c33ca --- a/typedapi/types/intervalsanyof.go +++ b/typedapi/types/intervalsanyof.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IntervalsAnyOf type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L58-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L58-L61 type IntervalsAnyOf struct { Filter *IntervalsFilter `json:"filter,omitempty"` Intervals []Intervals `json:"intervals"` diff --git a/typedapi/types/intervalsfilter.go b/typedapi/types/intervalsfilter.go old mode 100755 new mode 100644 index f81db016f4..36497c7ee0 --- a/typedapi/types/intervalsfilter.go +++ b/typedapi/types/intervalsfilter.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IntervalsFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L74-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L74-L86 type IntervalsFilter struct { After *Intervals `json:"after,omitempty"` Before *Intervals `json:"before,omitempty"` @@ -35,6 +43,71 @@ type IntervalsFilter struct { Script Script `json:"script,omitempty"` } +func (s *IntervalsFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "after": + if err := dec.Decode(&s.After); err != nil { + return err + } + + case "before": + if err := dec.Decode(&s.Before); err != nil { + return err + } + + case "contained_by": + if err := dec.Decode(&s.ContainedBy); err != nil { + return err + } + + case "containing": + if err := dec.Decode(&s.Containing); err != nil { + return err + } + + case "not_contained_by": + if err := dec.Decode(&s.NotContainedBy); err != nil { + return err + } + + case "not_containing": + if err := dec.Decode(&s.NotContaining); err != nil { + return err + } + + case "not_overlapping": + if err := dec.Decode(&s.NotOverlapping); err != nil { + return err + } + + case "overlapping": + if err := dec.Decode(&s.Overlapping); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewIntervalsFilter returns a IntervalsFilter. func NewIntervalsFilter() *IntervalsFilter { r := &IntervalsFilter{} diff --git a/typedapi/types/intervalsfuzzy.go b/typedapi/types/intervalsfuzzy.go old mode 100755 new mode 100644 index 6d6d2950ea..09fb18444b --- a/typedapi/types/intervalsfuzzy.go +++ b/typedapi/types/intervalsfuzzy.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IntervalsFuzzy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L88-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L88-L97 type IntervalsFuzzy struct { Analyzer *string `json:"analyzer,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` @@ -32,6 +42,82 @@ type IntervalsFuzzy struct { UseField *string `json:"use_field,omitempty"` } +func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "term": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Term = o + + case "transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Transpositions = &value + case bool: + s.Transpositions = &v + } + + case "use_field": + if err := dec.Decode(&s.UseField); err != nil { + return err + } + + } + } + return nil +} + // NewIntervalsFuzzy returns a IntervalsFuzzy. func NewIntervalsFuzzy() *IntervalsFuzzy { r := &IntervalsFuzzy{} diff --git a/typedapi/types/intervalsmatch.go b/typedapi/types/intervalsmatch.go old mode 100755 new mode 100644 index 03f369ed82..b396efcacc --- a/typedapi/types/intervalsmatch.go +++ b/typedapi/types/intervalsmatch.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IntervalsMatch type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L99-L108 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L99-L108 type IntervalsMatch struct { Analyzer *string `json:"analyzer,omitempty"` Filter *IntervalsFilter `json:"filter,omitempty"` @@ -32,6 +42,82 @@ type IntervalsMatch struct { UseField *string `json:"use_field,omitempty"` } +func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "max_gaps": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxGaps = &value + case float64: + f := int(v) + s.MaxGaps = &f + } + + case "ordered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Ordered = &value + case bool: + s.Ordered = &v + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "use_field": + if err := dec.Decode(&s.UseField); err != nil { + return err + } + + } + } + return nil +} + // NewIntervalsMatch returns a IntervalsMatch. func NewIntervalsMatch() *IntervalsMatch { r := &IntervalsMatch{} diff --git a/typedapi/types/intervalsprefix.go b/typedapi/types/intervalsprefix.go old mode 100755 new mode 100644 index c4e03507ea..ad99704d81 --- a/typedapi/types/intervalsprefix.go +++ b/typedapi/types/intervalsprefix.go @@ -16,19 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IntervalsPrefix type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L110-L114 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L110-L114 type IntervalsPrefix struct { Analyzer *string `json:"analyzer,omitempty"` Prefix string `json:"prefix"` UseField *string `json:"use_field,omitempty"` } +func (s *IntervalsPrefix) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "prefix": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Prefix = o + + case "use_field": + if err := dec.Decode(&s.UseField); err != nil { + return err + } + + } + } + return nil +} + // NewIntervalsPrefix returns a IntervalsPrefix. func NewIntervalsPrefix() *IntervalsPrefix { r := &IntervalsPrefix{} diff --git a/typedapi/types/intervalsquery.go b/typedapi/types/intervalsquery.go old mode 100755 new mode 100644 index d1088c9470..247486e48a --- a/typedapi/types/intervalsquery.go +++ b/typedapi/types/intervalsquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IntervalsQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L116-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L116-L125 type IntervalsQuery struct { AllOf *IntervalsAllOf `json:"all_of,omitempty"` AnyOf *IntervalsAnyOf `json:"any_of,omitempty"` @@ -34,6 +44,80 @@ type IntervalsQuery struct { Wildcard *IntervalsWildcard `json:"wildcard,omitempty"` } +func (s *IntervalsQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "all_of": + if err := dec.Decode(&s.AllOf); err != nil { + return err + } + + case "any_of": + if err := dec.Decode(&s.AnyOf); err != nil { + return err + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "fuzzy": + if err := dec.Decode(&s.Fuzzy); err != nil { + return err + } + + case "match": + if err := dec.Decode(&s.Match); err != nil { + return err + } + + case "prefix": + if err := dec.Decode(&s.Prefix); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "wildcard": + if err := dec.Decode(&s.Wildcard); err != nil { + return err + } + + } + } + return nil +} + // NewIntervalsQuery returns a IntervalsQuery. func NewIntervalsQuery() *IntervalsQuery { r := &IntervalsQuery{} diff --git a/typedapi/types/intervalswildcard.go b/typedapi/types/intervalswildcard.go old mode 100755 new mode 100644 index 6b3f9a298d..049aae9a3c --- a/typedapi/types/intervalswildcard.go +++ b/typedapi/types/intervalswildcard.go @@ -16,19 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // IntervalsWildcard type. 
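A sketch (assumption) tying the intervals hunks together: a typical intervals query body decodes through the nested UnmarshalJSON implementations above, including string-encoded numbers at both the query and clause level.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"match":{"query":"my favorite food","max_gaps":"0","ordered":true},"boost":"1.5"}`)

	var q types.IntervalsQuery
	if err := json.Unmarshal(raw, &q); err != nil {
		panic(err)
	}
	fmt.Println(*q.Boost, *q.Match.MaxGaps, *q.Match.Ordered) // 1.5 0 true
}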
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L127-L131 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L127-L131 type IntervalsWildcard struct { Analyzer *string `json:"analyzer,omitempty"` Pattern string `json:"pattern"` UseField *string `json:"use_field,omitempty"` } +func (s *IntervalsWildcard) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "use_field": + if err := dec.Decode(&s.UseField); err != nil { + return err + } + + } + } + return nil +} + // NewIntervalsWildcard returns a IntervalsWildcard. func NewIntervalsWildcard() *IntervalsWildcard { r := &IntervalsWildcard{} diff --git a/typedapi/types/invertedindex.go b/typedapi/types/invertedindex.go old mode 100755 new mode 100644 index cec7ce5149..cec46a1366 --- a/typedapi/types/invertedindex.go +++ b/typedapi/types/invertedindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // InvertedIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L65-L73 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L65-L73 type InvertedIndex struct { Offsets uint `json:"offsets"` Payloads uint `json:"payloads"` diff --git a/typedapi/types/invocation.go b/typedapi/types/invocation.go old mode 100755 new mode 100644 index b406040c7e..44aba968f0 --- a/typedapi/types/invocation.go +++ b/typedapi/types/invocation.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Invocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L138-L141 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L138-L141 type Invocation struct { SnapshotName string `json:"snapshot_name"` Time DateTime `json:"time"` } +func (s *Invocation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "snapshot_name": + if err := dec.Decode(&s.SnapshotName); err != nil { + return err + } + + case "time": + if err := dec.Decode(&s.Time); err != nil { + return err + } + + } + } + return nil +} + // NewInvocation returns a Invocation. func NewInvocation() *Invocation { r := &Invocation{} diff --git a/typedapi/types/invocations.go b/typedapi/types/invocations.go old mode 100755 new mode 100644 index e4c0e3b245..cb5bf6494d --- a/typedapi/types/invocations.go +++ b/typedapi/types/invocations.go @@ -16,17 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Invocations type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L44-L46 type Invocations struct { Total int64 `json:"total"` } +func (s *Invocations) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewInvocations returns a Invocations. func NewInvocations() *Invocations { r := &Invocations{} diff --git a/typedapi/types/iostatdevice.go b/typedapi/types/iostatdevice.go old mode 100755 new mode 100644 index 2a0f3e5cd7..888f64b3a8 --- a/typedapi/types/iostatdevice.go +++ b/typedapi/types/iostatdevice.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IoStatDevice type. 
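Invocations gets the same treatment: `total` may arrive as a JSON number or as a quoted number and still lands in the int64 field. A short illustrative sketch under the same import-path assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var a, b types.Invocations
	// Both a JSON number and a quoted number end up in the int64 Total field.
	if err := json.Unmarshal([]byte(`{"total": 42}`), &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal([]byte(`{"total": "42"}`), &b); err != nil {
		panic(err)
	}
	fmt.Println(a.Total, b.Total) // 42 42
}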
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L298-L305 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L298-L305 type IoStatDevice struct { DeviceName *string `json:"device_name,omitempty"` Operations *int64 `json:"operations,omitempty"` @@ -32,6 +42,109 @@ type IoStatDevice struct { WriteOperations *int64 `json:"write_operations,omitempty"` } +func (s *IoStatDevice) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "device_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DeviceName = &o + + case "operations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Operations = &value + case float64: + f := int64(v) + s.Operations = &f + } + + case "read_kilobytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReadKilobytes = &value + case float64: + f := int64(v) + s.ReadKilobytes = &f + } + + case "read_operations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReadOperations = &value + case float64: + f := int64(v) + s.ReadOperations = &f + } + + case "write_kilobytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.WriteKilobytes = &value + case float64: + f := int64(v) + s.WriteKilobytes = &f + } + + case "write_operations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.WriteOperations = &value + case float64: + f := int64(v) + s.WriteOperations = &f + } + + } + } + return nil +} + // NewIoStatDevice returns a IoStatDevice. func NewIoStatDevice() *IoStatDevice { r := &IoStatDevice{} diff --git a/typedapi/types/iostats.go b/typedapi/types/iostats.go old mode 100755 new mode 100644 index 1719859b93..dac31c90e0 --- a/typedapi/types/iostats.go +++ b/typedapi/types/iostats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // IoStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L293-L296 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L293-L296 type IoStats struct { Devices []IoStatDevice `json:"devices,omitempty"` Total *IoStatDevice `json:"total,omitempty"` diff --git a/typedapi/types/ipfilter.go b/typedapi/types/ipfilter.go old mode 100755 new mode 100644 index 3921fa1f97..a3fa5534b4 --- a/typedapi/types/ipfilter.go +++ b/typedapi/types/ipfilter.go @@ -16,18 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // IpFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L167-L170 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L167-L170 type IpFilter struct { Http bool `json:"http"` Transport bool `json:"transport"` } +func (s *IpFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "http": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Http = value + case bool: + s.Http = v + } + + case "transport": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Transport = value + case bool: + s.Transport = v + } + + } + } + return nil +} + // NewIpFilter returns a IpFilter. func NewIpFilter() *IpFilter { r := &IpFilter{} diff --git a/typedapi/types/ipprefixaggregate.go b/typedapi/types/ipprefixaggregate.go old mode 100755 new mode 100644 index bbeaeda50d..ec7ac62b47 --- a/typedapi/types/ipprefixaggregate.go +++ b/typedapi/types/ipprefixaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // IpPrefixAggregate type. 
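IpFilter's flags are likewise accepted either as real JSON booleans or as the strings "true"/"false". An illustrative sketch, not part of the generated sources (same assumptions as above):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var f types.IpFilter
	// "http" is a real bool, "transport" a quoted one; both decode into plain bools.
	if err := json.Unmarshal([]byte(`{"http": true, "transport": "false"}`), &f); err != nil {
		panic(err)
	}
	fmt.Println(f.Http, f.Transport) // true false
}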
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L628-L629 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L629-L630 type IpPrefixAggregate struct { - Buckets BucketsIpPrefixBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsIpPrefixBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *IpPrefixAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *IpPrefixAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]IpPrefixBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []IpPrefixBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/ipprefixaggregation.go b/typedapi/types/ipprefixaggregation.go old mode 100755 new mode 100644 index 398c2c9e7f..9db64354d6 --- a/typedapi/types/ipprefixaggregation.go +++ b/typedapi/types/ipprefixaggregation.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // IpPrefixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L514-L543 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L514-L543 type IpPrefixAggregation struct { // AppendPrefixLength Defines whether the prefix length is appended to IP address keys in the // response. @@ -38,8 +44,8 @@ type IpPrefixAggregation struct { IsIpv6 *bool `json:"is_ipv6,omitempty"` // Keyed Defines whether buckets are returned as a hash rather than an array in the // response. - Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` // MinDocCount Minimum number of documents for buckets to be included in the response. 
MinDocCount *int64 `json:"min_doc_count,omitempty"` Name *string `json:"name,omitempty"` @@ -49,6 +55,117 @@ type IpPrefixAggregation struct { PrefixLength int `json:"prefix_length"` } +func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "append_prefix_length": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AppendPrefixLength = &value + case bool: + s.AppendPrefixLength = &v + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "is_ipv6": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsIpv6 = &value + case bool: + s.IsIpv6 = &v + } + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int64(v) + s.MinDocCount = &f + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = value + case float64: + f := int(v) + s.PrefixLength = f + } + + } + } + return nil +} + // NewIpPrefixAggregation returns a IpPrefixAggregation. func NewIpPrefixAggregation() *IpPrefixAggregation { r := &IpPrefixAggregation{} diff --git a/typedapi/types/ipprefixbucket.go b/typedapi/types/ipprefixbucket.go old mode 100755 new mode 100644 index 17855f7164..c0caca3a70 --- a/typedapi/types/ipprefixbucket.go +++ b/typedapi/types/ipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // IpPrefixBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L631-L636 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L632-L637 type IpPrefixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -45,6 +47,7 @@ type IpPrefixBucket struct { } func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -58,471 +61,578 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - 
return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); 
err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "is_ipv6": - if err := dec.Decode(&s.IsIpv6); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsIpv6 = value + case bool: + s.IsIpv6 = v } case "key": - if err := dec.Decode(&s.Key); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Key = o case "netmask": - if err := dec.Decode(&s.Netmask); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Netmask = &o case "prefix_length": - if err := dec.Decode(&s.PrefixLength); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = value + case float64: + f := int(v) + s.PrefixLength = f + } + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } 
+ s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := 
NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } } } @@ -549,6 +659,7 @@ func (s IpPrefixBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/ipproperty.go b/typedapi/types/ipproperty.go old mode 100755 new mode 100644 index c837e1d8ac..e94814b03b --- a/typedapi/types/ipproperty.go +++ b/typedapi/types/ipproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // IpProperty type. 
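For bucket types such as IpPrefixBucket, the rewritten decoder now handles sub-aggregations in the `default` branch: any `"<type>#<name>"` key selects the concrete aggregate from the type prefix, allocates the Aggregations map on demand, and stores the value under the bare name. A hypothetical decode sketch, illustrative only; the concrete type shown in the final comment follows from the `min` case above, and the import path is assumed as before:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Sub-aggregations are keyed as "<type>#<name>"; the decoder picks the
	// concrete aggregate from the prefix and stores it under the bare name.
	src := []byte(`{
		"key": "192.168.0.0",
		"doc_count": 10,
		"prefix_length": 24,
		"is_ipv6": false,
		"min#lowest_byte_count": {"value": 512}
	}`)

	var b types.IpPrefixBucket
	if err := json.Unmarshal(src, &b); err != nil {
		panic(err)
	}
	fmt.Println(b.DocCount, b.PrefixLength)                  // 10 24
	fmt.Printf("%T\n", b.Aggregations["lowest_byte_count"]) // *types.MinAggregate
}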
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L59-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L59-L72 type IpProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -58,6 +60,7 @@ type IpProperty struct { } func (s *IpProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -72,18 +75,49 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -92,6 +126,9 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -379,36 +416,71 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.NullValue = &o case "on_script_error": if err := dec.Decode(&s.OnScriptError); err != nil { @@ -416,6 +488,9 @@ 
func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -703,7 +778,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -715,18 +790,39 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "type": diff --git a/typedapi/types/iprangeaggregate.go b/typedapi/types/iprangeaggregate.go old mode 100755 new mode 100644 index f1efabd619..049c91c360 --- a/typedapi/types/iprangeaggregate.go +++ b/typedapi/types/iprangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // IpRangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L555-L557 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L556-L558 type IpRangeAggregate struct { - Buckets BucketsIpRangeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsIpRangeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *IpRangeAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *IpRangeAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]IpRangeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []IpRangeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/iprangeaggregation.go b/typedapi/types/iprangeaggregation.go old mode 100755 new mode 100644 index c185783238..097226c71d --- a/typedapi/types/iprangeaggregation.go +++ b/typedapi/types/iprangeaggregation.go @@ -16,22 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // IpRangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L249-L252 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L249-L252 type IpRangeAggregation struct { - Field *string `json:"field,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Ranges []IpRangeAggregationRange `json:"ranges,omitempty"` + Field *string `json:"field,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Ranges []IpRangeAggregationRange `json:"ranges,omitempty"` +} + +func (s *IpRangeAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "ranges": + if err := dec.Decode(&s.Ranges); err != nil { + return err + } + + } + } + return nil } // NewIpRangeAggregation returns a IpRangeAggregation. diff --git a/typedapi/types/iprangeaggregationrange.go b/typedapi/types/iprangeaggregationrange.go old mode 100755 new mode 100644 index fe8b8b36ad..63af4d18ea --- a/typedapi/types/iprangeaggregationrange.go +++ b/typedapi/types/iprangeaggregationrange.go @@ -16,19 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // IpRangeAggregationRange type. 
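On the response side, IpRangeAggregate's `buckets` union decodes to a slice for the default array form and to a map for the `keyed` form. The sketch below is illustrative and assumes, as the assignments in the decoder imply, that the BucketsIpRangeBucket union is an interface type that can be inspected with a type switch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The buckets union is either an array (default) or an object (keyed: true).
	list := []byte(`{"buckets": [{"key": "a", "doc_count": 3}]}`)
	keyed := []byte(`{"buckets": {"a": {"doc_count": 3}}}`)

	for _, src := range [][]byte{list, keyed} {
		var agg types.IpRangeAggregate
		if err := json.Unmarshal(src, &agg); err != nil {
			panic(err)
		}
		switch b := agg.Buckets.(type) {
		case []types.IpRangeBucket:
			fmt.Println("array form,", len(b), "bucket(s)")
		case map[string]types.IpRangeBucket:
			fmt.Println("keyed form,", len(b), "bucket(s)")
		}
	}
}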
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L254-L258 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L254-L258 type IpRangeAggregationRange struct { From string `json:"from,omitempty"` Mask *string `json:"mask,omitempty"` To string `json:"to,omitempty"` } +func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "from": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.From = o + + case "mask": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Mask = &o + + case "to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.To = o + + } + } + return nil +} + // NewIpRangeAggregationRange returns a IpRangeAggregationRange. func NewIpRangeAggregationRange() *IpRangeAggregationRange { r := &IpRangeAggregationRange{} diff --git a/typedapi/types/iprangebucket.go b/typedapi/types/iprangebucket.go old mode 100755 new mode 100644 index 68fdc1981d..24c2fe4715 --- a/typedapi/types/iprangebucket.go +++ b/typedapi/types/iprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // IpRangeBucket type. 
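Request-side, an ip_range aggregation body can be composed from the constructor and plain struct literals shown above and serialised with encoding/json; sending it with a client is out of scope here. A sketch under the same import-path assumption:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	field := "clientip"
	mask := "10.0.0.0/25"

	agg := types.NewIpRangeAggregation()
	agg.Field = &field
	agg.Ranges = []types.IpRangeAggregationRange{
		{From: "10.0.0.0", To: "10.0.0.127"}, // explicit bounds
		{Mask: &mask},                        // or a CIDR mask
	}

	body, err := json.Marshal(agg)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}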
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L559-L563 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L560-L564 type IpRangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -44,6 +46,7 @@ type IpRangeBucket struct { } func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,467 +60,557 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return 
err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "from": - if err := dec.Decode(&s.From); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.From = &o case "key": - if err := dec.Decode(&s.Key); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Key = &o case "to": - if err := dec.Decode(&s.To); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.To = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := 
NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -543,6 +636,7 @@ func (s IpRangeBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/iprangeproperty.go b/typedapi/types/iprangeproperty.go old mode 100755 new mode 100644 index 55e05d3ae0..c56f79b715 --- a/typedapi/types/iprangeproperty.go +++ b/typedapi/types/iprangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // IpRangeProperty type. 
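The new default branch in the IpRangeBucket decoder above splits response keys of the form "<type>#<name>" (the shape Elasticsearch emits when typed keys are in effect), dispatches on the type prefix to a concrete aggregate constructor, and stores the decoded value under the bare name; keys without a "#" fall back to a plain map. The standalone sketch below mirrors that dispatch with toy types; maxAggregate and the key names are illustrative stand-ins, not the library's own types.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// maxAggregate is a toy stand-in for a concrete typed aggregate.
type maxAggregate struct {
	Value float64 `json:"value"`
}

// decodeAggregations mimics the "<type>#<name>" dispatch: typed keys are split
// on "#" and routed to a concrete type, everything else stays a generic map.
func decodeAggregations(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{}, len(raw))
	for key, msg := range raw {
		if elems := strings.Split(key, "#"); len(elems) == 2 {
			switch elems[0] {
			case "max":
				o := &maxAggregate{}
				if err := json.Unmarshal(msg, o); err != nil {
					return nil, err
				}
				out[elems[1]] = o
				continue
			}
			// Unknown typed aggregate: keep it as a generic map under the bare name.
			var o map[string]interface{}
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, err
			}
			out[elems[1]] = o
			continue
		}
		// Untyped key (typed keys disabled): generic map keyed by the raw name.
		var o map[string]interface{}
		if err := json.Unmarshal(msg, &o); err != nil {
			return nil, err
		}
		out[key] = o
	}
	return out, nil
}

func main() {
	raw := map[string]json.RawMessage{
		"max#price_max": json.RawMessage(`{"value": 42.5}`),
		"my_untyped":    json.RawMessage(`{"doc_count": 3}`),
	}
	aggs, err := decodeAggregations(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("price_max -> %T %+v\n", aggs["price_max"], aggs["price_max"])
	fmt.Printf("my_untyped -> %T %+v\n", aggs["my_untyped"], aggs["my_untyped"])
}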
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/range.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/range.ts#L46-L48 type IpRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -51,6 +53,7 @@ type IpRangeProperty struct { } func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,23 +68,63 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -90,6 +133,9 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -377,28 +423,54 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +758,32 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - 
if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/job.go b/typedapi/types/job.go old mode 100755 new mode 100644 index 86f3aaa775..9a2911b2df --- a/typedapi/types/job.go +++ b/typedapi/types/job.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // Job type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L51-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L51-L75 type Job struct { AllowLazyOpen bool `json:"allow_lazy_open"` AnalysisConfig AnalysisConfig `json:"analysis_config"` @@ -53,6 +59,205 @@ type Job struct { ResultsRetentionDays *int64 `json:"results_retention_days,omitempty"` } +func (s *Job) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_lazy_open": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowLazyOpen = value + case bool: + s.AllowLazyOpen = v + } + + case "analysis_config": + if err := dec.Decode(&s.AnalysisConfig); err != nil { + return err + } + + case "analysis_limits": + if err := dec.Decode(&s.AnalysisLimits); err != nil { + return err + } + + case "background_persist_interval": + if err := dec.Decode(&s.BackgroundPersistInterval); err != nil { + return err + } + + case "blocked": + if err := dec.Decode(&s.Blocked); err != nil { + return err + } + + case "create_time": + if err := dec.Decode(&s.CreateTime); err != nil { + return err + } + + case "custom_settings": + if err := dec.Decode(&s.CustomSettings); err != nil { + return err + } + + case "daily_model_snapshot_retention_after_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DailyModelSnapshotRetentionAfterDays = &value + case float64: + f := int64(v) + s.DailyModelSnapshotRetentionAfterDays = &f + } + + case "data_description": + if err := dec.Decode(&s.DataDescription); err != nil { + return err + } + + case "datafeed_config": + if err := dec.Decode(&s.DatafeedConfig); err != nil { + return err + } + + case "deleting": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Deleting = &value + 
case bool: + s.Deleting = &v + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "finished_time": + if err := dec.Decode(&s.FinishedTime); err != nil { + return err + } + + case "groups": + if err := dec.Decode(&s.Groups); err != nil { + return err + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "job_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.JobType = &o + + case "job_version": + if err := dec.Decode(&s.JobVersion); err != nil { + return err + } + + case "model_plot_config": + if err := dec.Decode(&s.ModelPlotConfig); err != nil { + return err + } + + case "model_snapshot_id": + if err := dec.Decode(&s.ModelSnapshotId); err != nil { + return err + } + + case "model_snapshot_retention_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ModelSnapshotRetentionDays = value + case float64: + f := int64(v) + s.ModelSnapshotRetentionDays = f + } + + case "renormalization_window_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RenormalizationWindowDays = &value + case float64: + f := int64(v) + s.RenormalizationWindowDays = &f + } + + case "results_index_name": + if err := dec.Decode(&s.ResultsIndexName); err != nil { + return err + } + + case "results_retention_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ResultsRetentionDays = &value + case float64: + f := int64(v) + s.ResultsRetentionDays = &f + } + + } + } + return nil +} + // NewJob returns a Job. func NewJob() *Job { r := &Job{} diff --git a/typedapi/types/jobblocked.go b/typedapi/types/jobblocked.go old mode 100755 new mode 100644 index 1c8c49af8e..122811b519 --- a/typedapi/types/jobblocked.go +++ b/typedapi/types/jobblocked.go @@ -16,22 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/jobblockedreason" + + "bytes" + "errors" + "io" + + "encoding/json" ) // JobBlocked type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L169-L172 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L169-L172 type JobBlocked struct { Reason jobblockedreason.JobBlockedReason `json:"reason"` TaskId TaskId `json:"task_id,omitempty"` } +func (s *JobBlocked) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "reason": + if err := dec.Decode(&s.Reason); err != nil { + return err + } + + case "task_id": + if err := dec.Decode(&s.TaskId); err != nil { + return err + } + + } + } + return nil +} + // NewJobBlocked returns a JobBlocked. func NewJobBlocked() *JobBlocked { r := &JobBlocked{} diff --git a/typedapi/types/jobconfig.go b/typedapi/types/jobconfig.go old mode 100755 new mode 100644 index 0ee81c6ad2..b91afe4a09 --- a/typedapi/types/jobconfig.go +++ b/typedapi/types/jobconfig.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // JobConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L77-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L77-L95 type JobConfig struct { AllowLazyOpen *bool `json:"allow_lazy_open,omitempty"` AnalysisConfig AnalysisConfig `json:"analysis_config"` @@ -47,6 +53,166 @@ type JobConfig struct { ResultsRetentionDays *int64 `json:"results_retention_days,omitempty"` } +func (s *JobConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_lazy_open": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowLazyOpen = &value + case bool: + s.AllowLazyOpen = &v + } + + case "analysis_config": + if err := dec.Decode(&s.AnalysisConfig); err != nil { + return err + } + + case "analysis_limits": + if err := dec.Decode(&s.AnalysisLimits); err != nil { + return err + } + + case "background_persist_interval": + if err := dec.Decode(&s.BackgroundPersistInterval); err != nil { + return err + } + + case "custom_settings": + if err := dec.Decode(&s.CustomSettings); err != nil { + return err + } + + case "daily_model_snapshot_retention_after_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DailyModelSnapshotRetentionAfterDays = &value + case float64: + f := int64(v) + s.DailyModelSnapshotRetentionAfterDays = &f + } + + case "data_description": + if err := dec.Decode(&s.DataDescription); err != nil { + return err + } + + case "datafeed_config": + if err := dec.Decode(&s.DatafeedConfig); err != nil { + 
return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "groups": + if err := dec.Decode(&s.Groups); err != nil { + return err + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "job_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.JobType = &o + + case "model_plot_config": + if err := dec.Decode(&s.ModelPlotConfig); err != nil { + return err + } + + case "model_snapshot_retention_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ModelSnapshotRetentionDays = &value + case float64: + f := int64(v) + s.ModelSnapshotRetentionDays = &f + } + + case "renormalization_window_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RenormalizationWindowDays = &value + case float64: + f := int64(v) + s.RenormalizationWindowDays = &f + } + + case "results_index_name": + if err := dec.Decode(&s.ResultsIndexName); err != nil { + return err + } + + case "results_retention_days": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ResultsRetentionDays = &value + case float64: + f := int64(v) + s.ResultsRetentionDays = &f + } + + } + } + return nil +} + // NewJobConfig returns a JobConfig. func NewJobConfig() *JobConfig { r := &JobConfig{} diff --git a/typedapi/types/jobforecaststatistics.go b/typedapi/types/jobforecaststatistics.go old mode 100755 new mode 100644 index 994682400f..59fb936333 --- a/typedapi/types/jobforecaststatistics.go +++ b/typedapi/types/jobforecaststatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JobForecastStatistics type. 
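A recurring change in the regenerated decoders above (IpRangeProperty, Job, JobConfig and the rest) is lenient scalar handling: the field is first decoded into interface{}, then either parsed from a JSON string with strconv or taken directly from the native JSON type, so "boost": "2.5" and "boost": 2.5 both work; the same shape covers bools via ParseBool and integers via Atoi/ParseInt. Below is a minimal self-contained sketch of that pattern with a made-up two-field struct; it is not the generated code itself.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

type lenientProperty struct {
	Boost  *float64
	Coerce *bool
}

func (s *lenientProperty) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	// Consume the opening '{' token.
	if _, err := dec.Token(); err != nil {
		return err
	}
	for dec.More() {
		key, err := dec.Token()
		if err != nil {
			return err
		}
		switch key {
		case "boost":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string: // "2.5" sent as a string
				f, err := strconv.ParseFloat(v, 64)
				if err != nil {
					return err
				}
				s.Boost = &f
			case float64: // 2.5 sent as a number
				s.Boost = &v
			}
		case "coerce":
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string: // "true" sent as a string
				b, err := strconv.ParseBool(v)
				if err != nil {
					return err
				}
				s.Coerce = &b
			case bool: // true sent as a boolean
				s.Coerce = &v
			}
		default:
			// Skip values this sketch does not model.
			var skip json.RawMessage
			if err := dec.Decode(&skip); err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	var p lenientProperty
	if err := json.Unmarshal([]byte(`{"boost":"2.5","coerce":true}`), &p); err != nil {
		panic(err)
	}
	fmt.Println(*p.Boost, *p.Coerce) // 2.5 true
}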
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L120-L127 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L120-L127 type JobForecastStatistics struct { ForecastedJobs int `json:"forecasted_jobs"` MemoryBytes *JobStatistics `json:"memory_bytes,omitempty"` @@ -32,6 +42,80 @@ type JobForecastStatistics struct { Total int64 `json:"total"` } +func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "forecasted_jobs": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ForecastedJobs = value + case float64: + f := int(v) + s.ForecastedJobs = f + } + + case "memory_bytes": + if err := dec.Decode(&s.MemoryBytes); err != nil { + return err + } + + case "processing_time_ms": + if err := dec.Decode(&s.ProcessingTimeMs); err != nil { + return err + } + + case "records": + if err := dec.Decode(&s.Records); err != nil { + return err + } + + case "status": + if s.Status == nil { + s.Status = make(map[string]int64, 0) + } + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewJobForecastStatistics returns a JobForecastStatistics. func NewJobForecastStatistics() *JobForecastStatistics { r := &JobForecastStatistics{ diff --git a/typedapi/types/jobsrecord.go b/typedapi/types/jobsrecord.go old mode 100755 new mode 100644 index 3fa37d8599..0fe86a495c --- a/typedapi/types/jobsrecord.go +++ b/typedapi/types/jobsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,11 +24,17 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/categorizationstatus" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/jobstate" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/memorystatus" + + "bytes" + "errors" + "io" + + "encoding/json" ) // JobsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_jobs/types.ts#L24-L325 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_jobs/types.ts#L24-L325 type JobsRecord struct { // AssignmentExplanation why the job is or is not assigned to a node AssignmentExplanation *string `json:"assignment_explanation,omitempty"` @@ -152,6 +158,479 @@ type JobsRecord struct { State *jobstate.JobState `json:"state,omitempty"` } +func (s *JobsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_explanation", "ae": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentExplanation = &o + + case "buckets.count", "bc", "bucketsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BucketsCount = &o + + case "buckets.time.exp_avg", "btea", "bucketsTimeExpAvg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BucketsTimeExpAvg = &o + + case "buckets.time.exp_avg_hour", "bteah", "bucketsTimeExpAvgHour": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BucketsTimeExpAvgHour = &o + + case "buckets.time.max", "btmax", "bucketsTimeMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BucketsTimeMax = &o + + case "buckets.time.min", "btmin", "bucketsTimeMin": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BucketsTimeMin = &o + + case "buckets.time.total", "btt", "bucketsTimeTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BucketsTimeTotal = &o + + case "data.buckets", "db", "dataBuckets": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataBuckets = &o + + case "data.earliest_record", "der", "dataEarliestRecord": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataEarliestRecord = &o + + case "data.empty_buckets", "deb", "dataEmptyBuckets": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataEmptyBuckets = &o + + case "data.input_bytes", "dib", "dataInputBytes": + if err := dec.Decode(&s.DataInputBytes); err != nil { + return err + } + + case "data.input_fields", "dif", "dataInputFields": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataInputFields = &o + + case "data.input_records", "dir", "dataInputRecords": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataInputRecords = &o + + case "data.invalid_dates", "did", "dataInvalidDates": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataInvalidDates = &o + + case "data.last", "dl", "dataLast": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataLast = &o + + case 
"data.last_empty_bucket", "dleb", "dataLastEmptyBucket": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataLastEmptyBucket = &o + + case "data.last_sparse_bucket", "dlsb", "dataLastSparseBucket": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataLastSparseBucket = &o + + case "data.latest_record", "dlr", "dataLatestRecord": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataLatestRecord = &o + + case "data.missing_fields", "dmf", "dataMissingFields": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataMissingFields = &o + + case "data.out_of_order_timestamps", "doot", "dataOutOfOrderTimestamps": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataOutOfOrderTimestamps = &o + + case "data.processed_fields", "dpf", "dataProcessedFields": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataProcessedFields = &o + + case "data.processed_records", "dpr", "dataProcessedRecords": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataProcessedRecords = &o + + case "data.sparse_buckets", "dsb", "dataSparseBuckets": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataSparseBuckets = &o + + case "forecasts.memory.avg", "fmavg", "forecastsMemoryAvg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsMemoryAvg = &o + + case "forecasts.memory.max", "fmmax", "forecastsMemoryMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsMemoryMax = &o + + case "forecasts.memory.min", "fmmin", "forecastsMemoryMin": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsMemoryMin = &o + + case "forecasts.memory.total", "fmt", "forecastsMemoryTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsMemoryTotal = &o + + case "forecasts.records.avg", "fravg", "forecastsRecordsAvg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsRecordsAvg = &o + + case "forecasts.records.max", "frmax", "forecastsRecordsMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsRecordsMax = &o + + case "forecasts.records.min", "frmin", "forecastsRecordsMin": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsRecordsMin = &o + + case "forecasts.records.total", "frt", "forecastsRecordsTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsRecordsTotal = &o + + case "forecasts.time.avg", "ftavg", "forecastsTimeAvg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsTimeAvg = &o + + case "forecasts.time.max", "ftmax", "forecastsTimeMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsTimeMax 
= &o + + case "forecasts.time.min", "ftmin", "forecastsTimeMin": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsTimeMin = &o + + case "forecasts.time.total", "ftt", "forecastsTimeTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsTimeTotal = &o + + case "forecasts.total", "ft", "forecastsTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ForecastsTotal = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "model.bucket_allocation_failures", "mbaf", "modelBucketAllocationFailures": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelBucketAllocationFailures = &o + + case "model.by_fields", "mbf", "modelByFields": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelByFields = &o + + case "model.bytes", "mb", "modelBytes": + if err := dec.Decode(&s.ModelBytes); err != nil { + return err + } + + case "model.bytes_exceeded", "mbe", "modelBytesExceeded": + if err := dec.Decode(&s.ModelBytesExceeded); err != nil { + return err + } + + case "model.categorization_status", "mcs", "modelCategorizationStatus": + if err := dec.Decode(&s.ModelCategorizationStatus); err != nil { + return err + } + + case "model.categorized_doc_count", "mcdc", "modelCategorizedDocCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelCategorizedDocCount = &o + + case "model.dead_category_count", "mdcc", "modelDeadCategoryCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelDeadCategoryCount = &o + + case "model.failed_category_count", "mfcc", "modelFailedCategoryCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelFailedCategoryCount = &o + + case "model.frequent_category_count", "modelFrequentCategoryCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelFrequentCategoryCount = &o + + case "model.log_time", "mlt", "modelLogTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelLogTime = &o + + case "model.memory_limit", "mml", "modelMemoryLimit": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelMemoryLimit = &o + + case "model.memory_status", "mms", "modelMemoryStatus": + if err := dec.Decode(&s.ModelMemoryStatus); err != nil { + return err + } + + case "model.over_fields", "mof", "modelOverFields": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelOverFields = &o + + case "model.partition_fields", "mpf", "modelPartitionFields": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelPartitionFields = &o + + case "model.rare_category_count", "mrcc", "modelRareCategoryCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelRareCategoryCount = &o + + case "model.timestamp", "mt", "modelTimestamp": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err 
+ } + o := string(tmp) + s.ModelTimestamp = &o + + case "model.total_category_count", "mtcc", "modelTotalCategoryCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ModelTotalCategoryCount = &o + + case "node.address", "na", "nodeAddress": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeAddress = &o + + case "node.ephemeral_id", "ne", "nodeEphemeralId": + if err := dec.Decode(&s.NodeEphemeralId); err != nil { + return err + } + + case "node.id", "ni", "nodeId": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "node.name", "nn", "nodeName": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeName = &o + + case "opened_time", "ot": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.OpenedTime = &o + + case "state", "s": + if err := dec.Decode(&s.State); err != nil { + return err + } + + } + } + return nil +} + // NewJobsRecord returns a JobsRecord. func NewJobsRecord() *JobsRecord { r := &JobsRecord{} diff --git a/typedapi/types/jobstatistics.go b/typedapi/types/jobstatistics.go old mode 100755 new mode 100644 index 7d800b7b74..a1dfbbc35c --- a/typedapi/types/jobstatistics.go +++ b/typedapi/types/jobstatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JobStatistics type. 
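The JobsRecord decoder above lists every spelling a _cat column may come back under (full name, short code, camelCase alias, e.g. "assignment_explanation"/"ae") in a single case clause, so the same field is populated whichever header form the response uses. The sketch below shows that alias handling for two of the columns from the record above, using a toy row type rather than the generated one.

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

type catJobRow struct {
	AssignmentExplanation *string
	BucketsCount          *string
}

func (s *catJobRow) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		// Accept the full column name and its aliases interchangeably.
		case "assignment_explanation", "ae":
			var v string
			if err := dec.Decode(&v); err != nil {
				return err
			}
			s.AssignmentExplanation = &v
		case "buckets.count", "bc", "bucketsCount":
			var v string
			if err := dec.Decode(&v); err != nil {
				return err
			}
			s.BucketsCount = &v
		}
	}
	return nil
}

func main() {
	for _, doc := range []string{
		`{"ae":"node full","bc":"12"}`,
		`{"assignment_explanation":"node full","bucketsCount":"12"}`,
	} {
		var row catJobRow
		if err := json.Unmarshal([]byte(doc), &row); err != nil {
			panic(err)
		}
		fmt.Println(*row.AssignmentExplanation, *row.BucketsCount)
	}
}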
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L44-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L44-L49 type JobStatistics struct { Avg Float64 `json:"avg"` Max Float64 `json:"max"` @@ -30,6 +40,90 @@ type JobStatistics struct { Total Float64 `json:"total"` } +func (s *JobStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Avg = f + case float64: + f := Float64(v) + s.Avg = f + } + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Max = f + case float64: + f := Float64(v) + s.Max = f + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Min = f + case float64: + f := Float64(v) + s.Min = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Total = f + case float64: + f := Float64(v) + s.Total = f + } + + } + } + return nil +} + // NewJobStatistics returns a JobStatistics. func NewJobStatistics() *JobStatistics { r := &JobStatistics{} diff --git a/typedapi/types/jobstats.go b/typedapi/types/jobstats.go old mode 100755 new mode 100644 index 3de7dcff30..dca42fb61c --- a/typedapi/types/jobstats.go +++ b/typedapi/types/jobstats.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/jobstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // JobStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L96-L107 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L96-L107 type JobStats struct { AssignmentExplanation *string `json:"assignment_explanation,omitempty"` DataCounts DataCounts `json:"data_counts"` @@ -40,6 +48,91 @@ type JobStats struct { TimingStats JobTimingStats `json:"timing_stats"` } +func (s *JobStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_explanation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentExplanation = &o + + case "data_counts": + if err := dec.Decode(&s.DataCounts); err != nil { + return err + } + + case "deleting": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Deleting = &value + case bool: + s.Deleting = &v + } + + case "forecasts_stats": + if err := dec.Decode(&s.ForecastsStats); err != nil { + return err + } + + case "job_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.JobId = o + + case "model_size_stats": + if err := dec.Decode(&s.ModelSizeStats); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "open_time": + if err := dec.Decode(&s.OpenTime); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "timing_stats": + if err := dec.Decode(&s.TimingStats); err != nil { + return err + } + + } + } + return nil +} + // NewJobStats returns a JobStats. func NewJobStats() *JobStats { r := &JobStats{} diff --git a/typedapi/types/jobtimingstats.go b/typedapi/types/jobtimingstats.go old mode 100755 new mode 100644 index 2cba128963..f06e6abbca --- a/typedapi/types/jobtimingstats.go +++ b/typedapi/types/jobtimingstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JobTimingStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Job.ts#L109-L118 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Job.ts#L109-L118 type JobTimingStats struct { AverageBucketProcessingTimeMs Float64 `json:"average_bucket_processing_time_ms,omitempty"` BucketCount int64 `json:"bucket_count"` @@ -34,6 +44,76 @@ type JobTimingStats struct { TotalBucketProcessingTimeMs Float64 `json:"total_bucket_processing_time_ms"` } +func (s *JobTimingStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "average_bucket_processing_time_ms": + if err := dec.Decode(&s.AverageBucketProcessingTimeMs); err != nil { + return err + } + + case "bucket_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BucketCount = value + case float64: + f := int64(v) + s.BucketCount = f + } + + case "exponential_average_bucket_processing_time_ms": + if err := dec.Decode(&s.ExponentialAverageBucketProcessingTimeMs); err != nil { + return err + } + + case "exponential_average_bucket_processing_time_per_hour_ms": + if err := dec.Decode(&s.ExponentialAverageBucketProcessingTimePerHourMs); err != nil { + return err + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "maximum_bucket_processing_time_ms": + if err := dec.Decode(&s.MaximumBucketProcessingTimeMs); err != nil { + return err + } + + case "minimum_bucket_processing_time_ms": + if err := dec.Decode(&s.MinimumBucketProcessingTimeMs); err != nil { + return err + } + + case "total_bucket_processing_time_ms": + if err := dec.Decode(&s.TotalBucketProcessingTimeMs); err != nil { + return err + } + + } + } + return nil +} + // NewJobTimingStats returns a JobTimingStats. func NewJobTimingStats() *JobTimingStats { r := &JobTimingStats{} diff --git a/typedapi/types/jobusage.go b/typedapi/types/jobusage.go old mode 100755 new mode 100644 index dfd5ae4f49..14e187f16f --- a/typedapi/types/jobusage.go +++ b/typedapi/types/jobusage.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JobUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L355-L361 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L355-L361 type JobUsage struct { Count int `json:"count"` CreatedBy map[string]int64 `json:"created_by"` @@ -31,6 +41,65 @@ type JobUsage struct { ModelSize JobStatistics `json:"model_size"` } +func (s *JobUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "created_by": + if s.CreatedBy == nil { + s.CreatedBy = make(map[string]int64, 0) + } + if err := dec.Decode(&s.CreatedBy); err != nil { + return err + } + + case "detectors": + if err := dec.Decode(&s.Detectors); err != nil { + return err + } + + case "forecasts": + if err := dec.Decode(&s.Forecasts); err != nil { + return err + } + + case "model_size": + if err := dec.Decode(&s.ModelSize); err != nil { + return err + } + + } + } + return nil +} + // NewJobUsage returns a JobUsage. func NewJobUsage() *JobUsage { r := &JobUsage{ diff --git a/typedapi/types/joinprocessor.go b/typedapi/types/joinprocessor.go old mode 100755 new mode 100644 index 640bb3514d..eebb50bf98 --- a/typedapi/types/joinprocessor.go +++ b/typedapi/types/joinprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JoinProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L265-L269 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L265-L269 type JoinProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,87 @@ type JoinProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *JoinProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Separator = o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewJoinProcessor returns a JoinProcessor. func NewJoinProcessor() *JoinProcessor { r := &JoinProcessor{} diff --git a/typedapi/types/joinproperty.go b/typedapi/types/joinproperty.go old mode 100755 new mode 100644 index 377a484b64..e145407816 --- a/typedapi/types/joinproperty.go +++ b/typedapi/types/joinproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // JoinProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L83-L87 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L83-L87 type JoinProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` @@ -46,6 +48,7 @@ type JoinProperty struct { } func (s *JoinProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,11 +68,23 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - if err := dec.Decode(&s.EagerGlobalOrdinals); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EagerGlobalOrdinals = &value + case bool: + s.EagerGlobalOrdinals = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -357,23 +372,40 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -661,15 +693,35 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "relations": - if err := dec.Decode(&s.Relations); err != nil { - return err + if s.Relations == nil { + s.Relations = make(map[string][]string, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := new(string) + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Relations[key] = append(s.Relations[key], *o) + default: + o := []string{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.Relations[key] = o + } } case "type": diff --git a/typedapi/types/jsonprocessor.go b/typedapi/types/jsonprocessor.go old mode 100755 new mode 100644 index 93136def28..f27a78cdd4 --- a/typedapi/types/jsonprocessor.go +++ b/typedapi/types/jsonprocessor.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/jsonprocessorconflictstrategy" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // JsonProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L271-L277 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L271-L277 type JsonProcessor struct { AddToRoot *bool `json:"add_to_root,omitempty"` AddToRootConflictStrategy *jsonprocessorconflictstrategy.JsonProcessorConflictStrategy `json:"add_to_root_conflict_strategy,omitempty"` @@ -40,6 +48,112 @@ type JsonProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *JsonProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "add_to_root": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AddToRoot = &value + case bool: + s.AddToRoot = &v + } + + case "add_to_root_conflict_strategy": + if err := dec.Decode(&s.AddToRootConflictStrategy); err != nil { + return err + } + + case "allow_duplicate_keys": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowDuplicateKeys = &value + case bool: + s.AllowDuplicateKeys = &v + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewJsonProcessor returns a JsonProcessor. func NewJsonProcessor() *JsonProcessor { r := &JsonProcessor{} diff --git a/typedapi/types/jvm.go b/typedapi/types/jvm.go old mode 100755 new mode 100644 index 24870de92f..a689c19de1 --- a/typedapi/types/jvm.go +++ b/typedapi/types/jvm.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Jvm type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L324-L333 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L324-L333 type Jvm struct { BufferPools map[string]NodeBufferPool `json:"buffer_pools,omitempty"` Classes *JvmClasses `json:"classes,omitempty"` @@ -34,6 +44,92 @@ type Jvm struct { UptimeInMillis *int64 `json:"uptime_in_millis,omitempty"` } +func (s *Jvm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buffer_pools": + if s.BufferPools == nil { + s.BufferPools = make(map[string]NodeBufferPool, 0) + } + if err := dec.Decode(&s.BufferPools); err != nil { + return err + } + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + case "gc": + if err := dec.Decode(&s.Gc); err != nil { + return err + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "threads": + if err := dec.Decode(&s.Threads); err != nil { + return err + } + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = &value + case float64: + f := int64(v) + s.Timestamp = &f + } + + case "uptime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Uptime = &o + + case "uptime_in_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UptimeInMillis = &value + case float64: + f := int64(v) + s.UptimeInMillis = &f + } + + } + } + return nil +} + // NewJvm returns a Jvm. func NewJvm() *Jvm { r := &Jvm{ diff --git a/typedapi/types/jvmclasses.go b/typedapi/types/jvmclasses.go old mode 100755 new mode 100644 index 0623d57834..f990729b49 --- a/typedapi/types/jvmclasses.go +++ b/typedapi/types/jvmclasses.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JvmClasses type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L357-L361 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L357-L361 type JvmClasses struct { CurrentLoadedCount *int64 `json:"current_loaded_count,omitempty"` TotalLoadedCount *int64 `json:"total_loaded_count,omitempty"` TotalUnloadedCount *int64 `json:"total_unloaded_count,omitempty"` } +func (s *JvmClasses) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current_loaded_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CurrentLoadedCount = &value + case float64: + f := int64(v) + s.CurrentLoadedCount = &f + } + + case "total_loaded_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalLoadedCount = &value + case float64: + f := int64(v) + s.TotalLoadedCount = &f + } + + case "total_unloaded_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalUnloadedCount = &value + case float64: + f := int64(v) + s.TotalUnloadedCount = &f + } + + } + } + return nil +} + // NewJvmClasses returns a JvmClasses. func NewJvmClasses() *JvmClasses { r := &JvmClasses{} diff --git a/typedapi/types/jvmmemorystats.go b/typedapi/types/jvmmemorystats.go old mode 100755 new mode 100644 index fe52cfaf00..5b4f9bc42a --- a/typedapi/types/jvmmemorystats.go +++ b/typedapi/types/jvmmemorystats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JvmMemoryStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L335-L343 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L335-L343 type JvmMemoryStats struct { HeapCommittedInBytes *int64 `json:"heap_committed_in_bytes,omitempty"` HeapMaxInBytes *int64 `json:"heap_max_in_bytes,omitempty"` @@ -33,6 +43,124 @@ type JvmMemoryStats struct { Pools map[string]Pool `json:"pools,omitempty"` } +func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "heap_committed_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapCommittedInBytes = &value + case float64: + f := int64(v) + s.HeapCommittedInBytes = &f + } + + case "heap_max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapMaxInBytes = &value + case float64: + f := int64(v) + s.HeapMaxInBytes = &f + } + + case "heap_used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapUsedInBytes = &value + case float64: + f := int64(v) + s.HeapUsedInBytes = &f + } + + case "heap_used_percent": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapUsedPercent = &value + case float64: + f := int64(v) + s.HeapUsedPercent = &f + } + + case "non_heap_committed_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NonHeapCommittedInBytes = &value + case float64: + f := int64(v) + s.NonHeapCommittedInBytes = &f + } + + case "non_heap_used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NonHeapUsedInBytes = &value + case float64: + f := int64(v) + s.NonHeapUsedInBytes = &f + } + + case "pools": + if s.Pools == nil { + s.Pools = make(map[string]Pool, 0) + } + if err := dec.Decode(&s.Pools); err != nil { + return err + } + + } + } + return nil +} + // NewJvmMemoryStats returns a JvmMemoryStats. func NewJvmMemoryStats() *JvmMemoryStats { r := &JvmMemoryStats{ diff --git a/typedapi/types/jvmstats.go b/typedapi/types/jvmstats.go old mode 100755 new mode 100644 index d139bf1150..4e5363600f --- a/typedapi/types/jvmstats.go +++ b/typedapi/types/jvmstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JvmStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_memory_stats/types.ts#L50-L63 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_memory_stats/types.ts#L50-L63 type JvmStats struct { // HeapMax Maximum amount of memory available for use by the heap. HeapMax ByteSize `json:"heap_max,omitempty"` @@ -40,6 +50,89 @@ type JvmStats struct { JavaInferenceMaxInBytes int `json:"java_inference_max_in_bytes"` } +func (s *JvmStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "heap_max": + if err := dec.Decode(&s.HeapMax); err != nil { + return err + } + + case "heap_max_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.HeapMaxInBytes = value + case float64: + f := int(v) + s.HeapMaxInBytes = f + } + + case "java_inference": + if err := dec.Decode(&s.JavaInference); err != nil { + return err + } + + case "java_inference_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.JavaInferenceInBytes = value + case float64: + f := int(v) + s.JavaInferenceInBytes = f + } + + case "java_inference_max": + if err := dec.Decode(&s.JavaInferenceMax); err != nil { + return err + } + + case "java_inference_max_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.JavaInferenceMaxInBytes = value + case float64: + f := int(v) + s.JavaInferenceMaxInBytes = f + } + + } + } + return nil +} + // NewJvmStats returns a JvmStats. func NewJvmStats() *JvmStats { r := &JvmStats{} diff --git a/typedapi/types/jvmthreads.go b/typedapi/types/jvmthreads.go old mode 100755 new mode 100644 index 4d8f839c6d..6203b4afcc --- a/typedapi/types/jvmthreads.go +++ b/typedapi/types/jvmthreads.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // JvmThreads type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L352-L355 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L352-L355 type JvmThreads struct { Count *int64 `json:"count,omitempty"` PeakCount *int64 `json:"peak_count,omitempty"` } +func (s *JvmThreads) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "peak_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PeakCount = &value + case float64: + f := int64(v) + s.PeakCount = &f + } + + } + } + return nil +} + // NewJvmThreads returns a JvmThreads. func NewJvmThreads() *JvmThreads { r := &JvmThreads{} diff --git a/typedapi/types/keeptypestokenfilter.go b/typedapi/types/keeptypestokenfilter.go old mode 100755 new mode 100644 index ffd0688bb7..c1a500f56a --- a/typedapi/types/keeptypestokenfilter.go +++ b/typedapi/types/keeptypestokenfilter.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/keeptypesmode" + + "bytes" + "errors" + "io" + + "encoding/json" ) // KeepTypesTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L217-L221 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L218-L222 type KeepTypesTokenFilter struct { Mode *keeptypesmode.KeepTypesMode `json:"mode,omitempty"` Type string `json:"type,omitempty"` @@ -34,6 +40,46 @@ type KeepTypesTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *KeepTypesTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "types": + if err := dec.Decode(&s.Types); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKeepTypesTokenFilter returns a KeepTypesTokenFilter. 
func NewKeepTypesTokenFilter() *KeepTypesTokenFilter { r := &KeepTypesTokenFilter{} diff --git a/typedapi/types/keepwordstokenfilter.go b/typedapi/types/keepwordstokenfilter.go old mode 100755 new mode 100644 index 9d9cf34660..46fe690b1b --- a/typedapi/types/keepwordstokenfilter.go +++ b/typedapi/types/keepwordstokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KeepWordsTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L223-L228 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L224-L229 type KeepWordsTokenFilter struct { KeepWords []string `json:"keep_words,omitempty"` KeepWordsCase *bool `json:"keep_words_case,omitempty"` @@ -31,6 +41,63 @@ type KeepWordsTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *KeepWordsTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "keep_words": + if err := dec.Decode(&s.KeepWords); err != nil { + return err + } + + case "keep_words_case": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.KeepWordsCase = &value + case bool: + s.KeepWordsCase = &v + } + + case "keep_words_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.KeepWordsPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKeepWordsTokenFilter returns a KeepWordsTokenFilter. func NewKeepWordsTokenFilter() *KeepWordsTokenFilter { r := &KeepWordsTokenFilter{} diff --git a/typedapi/types/keyedpercentiles.go b/typedapi/types/keyedpercentiles.go old mode 100755 new mode 100644 index ed6db257be..2f52054db0 --- a/typedapi/types/keyedpercentiles.go +++ b/typedapi/types/keyedpercentiles.go @@ -16,11 +16,44 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + // KeyedPercentiles type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L157-L157 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L158-L158 type KeyedPercentiles map[string]string + +func (s KeyedPercentiles) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + if key, ok := t.(string); ok { + + var tmp interface{} + if err := dec.Decode(&tmp); err != nil { + return err + } + s[key] = fmt.Sprintf("%v", tmp) + + } + } + return nil +} diff --git a/typedapi/types/keyedprocessor.go b/typedapi/types/keyedprocessor.go old mode 100755 new mode 100644 index 9d90c60152..06c50d7d5e --- a/typedapi/types/keyedprocessor.go +++ b/typedapi/types/keyedprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // KeyedProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L157-L160 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L157-L160 type KeyedProcessor struct { Stats *Processor `json:"stats,omitempty"` Type *string `json:"type,omitempty"` diff --git a/typedapi/types/keyvalueprocessor.go b/typedapi/types/keyvalueprocessor.go old mode 100755 new mode 100644 index 24e7c3a983..d445ad25ea --- a/typedapi/types/keyvalueprocessor.go +++ b/typedapi/types/keyvalueprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KeyValueProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L286-L298 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L286-L298 type KeyValueProcessor struct { Description *string `json:"description,omitempty"` ExcludeKeys []string `json:"exclude_keys,omitempty"` @@ -42,6 +52,157 @@ type KeyValueProcessor struct { ValueSplit string `json:"value_split"` } +func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "exclude_keys": + if err := dec.Decode(&s.ExcludeKeys); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "field_split": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FieldSplit = o + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "include_keys": + if err := dec.Decode(&s.IncludeKeys); err != nil { + return err + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "prefix": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Prefix = &o + + case "strip_brackets": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StripBrackets = &value + case bool: + s.StripBrackets = &v + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + case "trim_key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TrimKey = &o + + case "trim_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TrimValue = &o + + case "value_split": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueSplit = o + + } + } + return nil +} + // NewKeyValueProcessor returns a KeyValueProcessor. func NewKeyValueProcessor() *KeyValueProcessor { r := &KeyValueProcessor{} diff --git a/typedapi/types/keywordanalyzer.go b/typedapi/types/keywordanalyzer.go old mode 100755 new mode 100644 index ee6cd64697..af2df74b22 --- a/typedapi/types/keywordanalyzer.go +++ b/typedapi/types/keywordanalyzer.go @@ -16,18 +16,56 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // KeywordAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L47-L50 type KeywordAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *KeywordAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKeywordAnalyzer returns a KeywordAnalyzer. func NewKeywordAnalyzer() *KeywordAnalyzer { r := &KeywordAnalyzer{} diff --git a/typedapi/types/keywordmarkertokenfilter.go b/typedapi/types/keywordmarkertokenfilter.go old mode 100755 new mode 100644 index 2f11360d6a..d9f1144236 --- a/typedapi/types/keywordmarkertokenfilter.go +++ b/typedapi/types/keywordmarkertokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KeywordMarkerTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L230-L236 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L231-L237 type KeywordMarkerTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` Keywords []string `json:"keywords,omitempty"` @@ -32,6 +42,71 @@ type KeywordMarkerTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *KeywordMarkerTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "ignore_case": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreCase = &value + case bool: + s.IgnoreCase = &v + } + + case "keywords": + if err := dec.Decode(&s.Keywords); err != nil { + return err + } + + case "keywords_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.KeywordsPath = &o + + case "keywords_pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.KeywordsPattern = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKeywordMarkerTokenFilter returns a KeywordMarkerTokenFilter. func NewKeywordMarkerTokenFilter() *KeywordMarkerTokenFilter { r := &KeywordMarkerTokenFilter{} diff --git a/typedapi/types/keywordproperty.go b/typedapi/types/keywordproperty.go old mode 100755 new mode 100644 index 5c638299ca..6e93943400 --- a/typedapi/types/keywordproperty.go +++ b/typedapi/types/keywordproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // KeywordProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L89-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L89-L104 type KeywordProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -60,6 +62,7 @@ type KeywordProperty struct { } func (s *KeywordProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -74,18 +77,49 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -94,11 +128,23 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - if err := dec.Decode(&s.EagerGlobalOrdinals); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EagerGlobalOrdinals = &value + case bool: + s.EagerGlobalOrdinals = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -386,20 +432,40 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "index_options": @@ -408,26 +474,47 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "normalizer": - if err := dec.Decode(&s.Normalizer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { 
return err } + o := string(tmp) + s.Normalizer = &o case "norms": - if err := dec.Decode(&s.Norms); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Norms = &value + case bool: + s.Norms = &v } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.NullValue = &o case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -715,30 +802,60 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "split_queries_on_whitespace": - if err := dec.Decode(&s.SplitQueriesOnWhitespace); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SplitQueriesOnWhitespace = &value + case bool: + s.SplitQueriesOnWhitespace = &v } case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "type": diff --git a/typedapi/types/keywordtokenizer.go b/typedapi/types/keywordtokenizer.go old mode 100755 new mode 100644 index 50a6a0f64a..09cc4f29c1 --- a/typedapi/types/keywordtokenizer.go +++ b/typedapi/types/keywordtokenizer.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KeywordTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L61-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L61-L64 type KeywordTokenizer struct { BufferSize int `json:"buffer_size"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *KeywordTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buffer_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.BufferSize = value + case float64: + f := int(v) + s.BufferSize = f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKeywordTokenizer returns a KeywordTokenizer. func NewKeywordTokenizer() *KeywordTokenizer { r := &KeywordTokenizer{} diff --git a/typedapi/types/kibanatoken.go b/typedapi/types/kibanatoken.go old mode 100755 new mode 100644 index 82043f4a73..3f36548bba --- a/typedapi/types/kibanatoken.go +++ b/typedapi/types/kibanatoken.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // KibanaToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/enroll_kibana/Response.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/enroll_kibana/Response.ts#L27-L30 type KibanaToken struct { Name string `json:"name"` Value string `json:"value"` diff --git a/typedapi/types/knnquery.go b/typedapi/types/knnquery.go old mode 100755 new mode 100644 index da2e16ecc8..75b571b133 --- a/typedapi/types/knnquery.go +++ b/typedapi/types/knnquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KnnQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Knn.ts#L26-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Knn.ts#L26-L41 type KnnQuery struct { // Boost Boost value to apply to kNN scores Boost *float32 `json:"boost,omitempty"` @@ -41,6 +51,103 @@ type KnnQuery struct { QueryVectorBuilder *QueryVectorBuilder `json:"query_vector_builder,omitempty"` } +func (s *KnnQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "filter": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Filter = append(s.Filter, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { + return err + } + } + + case "k": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.K = value + case float64: + f := int64(v) + s.K = f + } + + case "num_candidates": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumCandidates = value + case float64: + f := int64(v) + s.NumCandidates = f + } + + case "query_vector": + if err := dec.Decode(&s.QueryVector); err != nil { + return err + } + + case "query_vector_builder": + if err := dec.Decode(&s.QueryVectorBuilder); err != nil { + return err + } + + } + } + return nil +} + // NewKnnQuery returns a KnnQuery. func NewKnnQuery() *KnnQuery { r := &KnnQuery{} diff --git a/typedapi/types/kstemtokenfilter.go b/typedapi/types/kstemtokenfilter.go old mode 100755 new mode 100644 index 1d6c4e0165..200d177e3f --- a/typedapi/types/kstemtokenfilter.go +++ b/typedapi/types/kstemtokenfilter.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // KStemTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L238-L240 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L239-L241 type KStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *KStemTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKStemTokenFilter returns a KStemTokenFilter. func NewKStemTokenFilter() *KStemTokenFilter { r := &KStemTokenFilter{} diff --git a/typedapi/types/kuromojianalyzer.go b/typedapi/types/kuromojianalyzer.go old mode 100755 new mode 100644 index 168b68b258..c7040dfcd1 --- a/typedapi/types/kuromojianalyzer.go +++ b/typedapi/types/kuromojianalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // KuromojiAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L25-L29 type KuromojiAnalyzer struct { Mode kuromojitokenizationmode.KuromojiTokenizationMode `json:"mode"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/kuromojiiterationmarkcharfilter.go b/typedapi/types/kuromojiiterationmarkcharfilter.go old mode 100755 new mode 100644 index 81d910e4cb..851467a31f --- a/typedapi/types/kuromojiiterationmarkcharfilter.go +++ b/typedapi/types/kuromojiiterationmarkcharfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KuromojiIterationMarkCharFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L31-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L31-L35 type KuromojiIterationMarkCharFilter struct { NormalizeKana bool `json:"normalize_kana"` NormalizeKanji bool `json:"normalize_kanji"` @@ -30,6 +40,64 @@ type KuromojiIterationMarkCharFilter struct { Version *string `json:"version,omitempty"` } +func (s *KuromojiIterationMarkCharFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "normalize_kana": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.NormalizeKana = value + case bool: + s.NormalizeKana = v + } + + case "normalize_kanji": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.NormalizeKanji = value + case bool: + s.NormalizeKanji = v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKuromojiIterationMarkCharFilter returns a KuromojiIterationMarkCharFilter. func NewKuromojiIterationMarkCharFilter() *KuromojiIterationMarkCharFilter { r := &KuromojiIterationMarkCharFilter{} diff --git a/typedapi/types/kuromojipartofspeechtokenfilter.go b/typedapi/types/kuromojipartofspeechtokenfilter.go old mode 100755 new mode 100644 index 74a598e917..d53a81effa --- a/typedapi/types/kuromojipartofspeechtokenfilter.go +++ b/typedapi/types/kuromojipartofspeechtokenfilter.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // KuromojiPartOfSpeechTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L37-L40 type KuromojiPartOfSpeechTokenFilter struct { Stoptags []string `json:"stoptags"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *KuromojiPartOfSpeechTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "stoptags": + if err := dec.Decode(&s.Stoptags); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKuromojiPartOfSpeechTokenFilter returns a KuromojiPartOfSpeechTokenFilter. 
func NewKuromojiPartOfSpeechTokenFilter() *KuromojiPartOfSpeechTokenFilter { r := &KuromojiPartOfSpeechTokenFilter{} diff --git a/typedapi/types/kuromojireadingformtokenfilter.go b/typedapi/types/kuromojireadingformtokenfilter.go old mode 100755 new mode 100644 index 1261bbff21..ff306c92d6 --- a/typedapi/types/kuromojireadingformtokenfilter.go +++ b/typedapi/types/kuromojireadingformtokenfilter.go @@ -16,19 +16,73 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KuromojiReadingFormTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L42-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L42-L45 type KuromojiReadingFormTokenFilter struct { Type string `json:"type,omitempty"` UseRomaji bool `json:"use_romaji"` Version *string `json:"version,omitempty"` } +func (s *KuromojiReadingFormTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "use_romaji": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UseRomaji = value + case bool: + s.UseRomaji = v + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKuromojiReadingFormTokenFilter returns a KuromojiReadingFormTokenFilter. func NewKuromojiReadingFormTokenFilter() *KuromojiReadingFormTokenFilter { r := &KuromojiReadingFormTokenFilter{} diff --git a/typedapi/types/kuromojistemmertokenfilter.go b/typedapi/types/kuromojistemmertokenfilter.go old mode 100755 new mode 100644 index bf7fce2a40..294c1eb5b9 --- a/typedapi/types/kuromojistemmertokenfilter.go +++ b/typedapi/types/kuromojistemmertokenfilter.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // KuromojiStemmerTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L47-L50 type KuromojiStemmerTokenFilter struct { MinimumLength int `json:"minimum_length"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *KuromojiStemmerTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "minimum_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinimumLength = value + case float64: + f := int(v) + s.MinimumLength = f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKuromojiStemmerTokenFilter returns a KuromojiStemmerTokenFilter. func NewKuromojiStemmerTokenFilter() *KuromojiStemmerTokenFilter { r := &KuromojiStemmerTokenFilter{} diff --git a/typedapi/types/kuromojitokenizer.go b/typedapi/types/kuromojitokenizer.go old mode 100755 new mode 100644 index c31a167ba9..0417d3ecde --- a/typedapi/types/kuromojitokenizer.go +++ b/typedapi/types/kuromojitokenizer.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/kuromojitokenizationmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // KuromojiTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/kuromoji-plugin.ts#L58-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/kuromoji-plugin.ts#L58-L67 type KuromojiTokenizer struct { DiscardCompoundToken *bool `json:"discard_compound_token,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` @@ -39,6 +47,106 @@ type KuromojiTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "discard_compound_token": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DiscardCompoundToken = &value + case bool: + s.DiscardCompoundToken = &v + } + + case "discard_punctuation": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DiscardPunctuation = &value + case bool: + s.DiscardPunctuation = &v + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "nbest_cost": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NbestCost = &value + case float64: + f := int(v) + s.NbestCost = &f + } + + case "nbest_examples": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NbestExamples = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "user_dictionary": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UserDictionary = &o + + case "user_dictionary_rules": + if err := dec.Decode(&s.UserDictionaryRules); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewKuromojiTokenizer returns a KuromojiTokenizer. func NewKuromojiTokenizer() *KuromojiTokenizer { r := &KuromojiTokenizer{} diff --git a/typedapi/types/languageanalyzer.go b/typedapi/types/languageanalyzer.go old mode 100755 new mode 100644 index 65b0efd7fb..fa9d61f7f6 --- a/typedapi/types/languageanalyzer.go +++ b/typedapi/types/languageanalyzer.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/language" + + "bytes" + "errors" + "io" + + "encoding/json" ) // LanguageAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L52-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L52-L59 type LanguageAnalyzer struct { Language language.Language `json:"language"` StemExclusion []string `json:"stem_exclusion"` @@ -36,6 +42,70 @@ type LanguageAnalyzer struct { Version *string `json:"version,omitempty"` } +func (s *LanguageAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "language": + if err := dec.Decode(&s.Language); err != nil { + return err + } + + case "stem_exclusion": + if err := dec.Decode(&s.StemExclusion); err != nil { + return err + } + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "stopwords_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StopwordsPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLanguageAnalyzer returns a LanguageAnalyzer. func NewLanguageAnalyzer() *LanguageAnalyzer { r := &LanguageAnalyzer{} diff --git a/typedapi/types/languagecontext.go b/typedapi/types/languagecontext.go old mode 100755 new mode 100644 index 5f9ad4dd77..9749893f92 --- a/typedapi/types/languagecontext.go +++ b/typedapi/types/languagecontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // LanguageContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/get_script_languages/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/get_script_languages/types.ts#L22-L25 type LanguageContext struct { Contexts []string `json:"contexts"` Language scriptlanguage.ScriptLanguage `json:"language"` diff --git a/typedapi/types/laplacesmoothingmodel.go b/typedapi/types/laplacesmoothingmodel.go old mode 100755 new mode 100644 index 0d97b3091f..61d70bca06 --- a/typedapi/types/laplacesmoothingmodel.go +++ b/typedapi/types/laplacesmoothingmodel.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // LaplaceSmoothingModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L212-L214 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L216-L218 type LaplaceSmoothingModel struct { Alpha Float64 `json:"alpha"` } +func (s *LaplaceSmoothingModel) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alpha": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Alpha = f + case float64: + f := Float64(v) + s.Alpha = f + } + + } + } + return nil +} + // NewLaplaceSmoothingModel returns a LaplaceSmoothingModel. func NewLaplaceSmoothingModel() *LaplaceSmoothingModel { r := &LaplaceSmoothingModel{} diff --git a/typedapi/types/latest.go b/typedapi/types/latest.go old mode 100755 new mode 100644 index b2359d0bc1..9d6ee3bbe2 --- a/typedapi/types/latest.go +++ b/typedapi/types/latest.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Latest type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L47-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L47-L52 type Latest struct { // Sort Specifies the date field that is used to identify the latest documents. Sort string `json:"sort"` @@ -30,6 +38,36 @@ type Latest struct { UniqueKey []string `json:"unique_key"` } +func (s *Latest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + case "unique_key": + if err := dec.Decode(&s.UniqueKey); err != nil { + return err + } + + } + } + return nil +} + // NewLatest returns a Latest. func NewLatest() *Latest { r := &Latest{} diff --git a/typedapi/types/latlongeolocation.go b/typedapi/types/latlongeolocation.go old mode 100755 new mode 100644 index e7c8e234b2..7307544b22 --- a/typedapi/types/latlongeolocation.go +++ b/typedapi/types/latlongeolocation.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // LatLonGeoLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L110-L113 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L110-L113 type LatLonGeoLocation struct { Lat Float64 `json:"lat"` Lon Float64 `json:"lon"` } +func (s *LatLonGeoLocation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "lat": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lat = f + case float64: + f := Float64(v) + s.Lat = f + } + + case "lon": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lon = f + case float64: + f := Float64(v) + s.Lon = f + } + + } + } + return nil +} + // NewLatLonGeoLocation returns a LatLonGeoLocation. func NewLatLonGeoLocation() *LatLonGeoLocation { r := &LatLonGeoLocation{} diff --git a/typedapi/types/lengthtokenfilter.go b/typedapi/types/lengthtokenfilter.go old mode 100755 new mode 100644 index 5cc24942d9..387dba7c1b --- a/typedapi/types/lengthtokenfilter.go +++ b/typedapi/types/lengthtokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // LengthTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L242-L246 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L243-L247 type LengthTokenFilter struct { Max *int `json:"max,omitempty"` Min *int `json:"min,omitempty"` @@ -30,6 +40,68 @@ type LengthTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *LengthTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Max = &value + case float64: + f := int(v) + s.Max = &f + } + + case "min": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Min = &value + case float64: + f := int(v) + s.Min = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLengthTokenFilter returns a LengthTokenFilter. func NewLengthTokenFilter() *LengthTokenFilter { r := &LengthTokenFilter{} diff --git a/typedapi/types/lettertokenizer.go b/typedapi/types/lettertokenizer.go old mode 100755 new mode 100644 index 432ee26c81..3542cd9c3e --- a/typedapi/types/lettertokenizer.go +++ b/typedapi/types/lettertokenizer.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // LetterTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L66-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L66-L68 type LetterTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *LetterTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLetterTokenizer returns a LetterTokenizer. func NewLetterTokenizer() *LetterTokenizer { r := &LetterTokenizer{} diff --git a/typedapi/types/license.go b/typedapi/types/license.go old mode 100755 new mode 100644 index e93da51783..986f51e459 --- a/typedapi/types/license.go +++ b/typedapi/types/license.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/licensetype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // License type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/_types/License.ts#L42-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/_types/License.ts#L42-L53 type License struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` IssueDateInMillis int64 `json:"issue_date_in_millis"` @@ -40,6 +48,98 @@ type License struct { Uid string `json:"uid"` } +func (s *License) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expiry_date_in_millis": + if err := dec.Decode(&s.ExpiryDateInMillis); err != nil { + return err + } + + case "issue_date_in_millis": + if err := dec.Decode(&s.IssueDateInMillis); err != nil { + return err + } + + case "issued_to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IssuedTo = o + + case "issuer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Issuer = o + + case "max_nodes": + if err := dec.Decode(&s.MaxNodes); err != nil { + return err + } + + case "max_resource_units": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxResourceUnits = &value + case float64: + f := int64(v) + s.MaxResourceUnits = &f + } + + case "signature": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Signature = o + + case "start_date_in_millis": + if err := dec.Decode(&s.StartDateInMillis); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "uid": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Uid = o + + } + } + return nil +} + // NewLicense returns a License. func NewLicense() *License { r := &License{} diff --git a/typedapi/types/licenseinformation.go b/typedapi/types/licenseinformation.go old mode 100755 new mode 100644 index 09f594b15a..3f6fc4e467 --- a/typedapi/types/licenseinformation.go +++ b/typedapi/types/licenseinformation.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/licensestatus" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/licensetype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // LicenseInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/license/get/types.ts#L25-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/license/get/types.ts#L25-L38 type LicenseInformation struct { ExpiryDate DateTime `json:"expiry_date,omitempty"` ExpiryDateInMillis *int64 `json:"expiry_date_in_millis,omitempty"` @@ -43,6 +49,92 @@ type LicenseInformation struct { Uid string `json:"uid"` } +func (s *LicenseInformation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expiry_date": + if err := dec.Decode(&s.ExpiryDate); err != nil { + return err + } + + case "expiry_date_in_millis": + if err := dec.Decode(&s.ExpiryDateInMillis); err != nil { + return err + } + + case "issue_date": + if err := dec.Decode(&s.IssueDate); err != nil { + return err + } + + case "issue_date_in_millis": + if err := dec.Decode(&s.IssueDateInMillis); err != nil { + return err + } + + case "issued_to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IssuedTo = o + + case "issuer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Issuer = o + + case "max_nodes": + if err := dec.Decode(&s.MaxNodes); err != nil { + return err + } + + case "max_resource_units": + if err := dec.Decode(&s.MaxResourceUnits); err != nil { + return err + } + + case "start_date_in_millis": + if err := dec.Decode(&s.StartDateInMillis); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "uid": + if err := dec.Decode(&s.Uid); err != nil { + return err + } + + } + } + return nil +} + // NewLicenseInformation returns a LicenseInformation. func NewLicenseInformation() *LicenseInformation { r := &LicenseInformation{} diff --git a/typedapi/types/lifecycle.go b/typedapi/types/lifecycle.go old mode 100755 new mode 100644 index ed549e0074..879f2078f2 --- a/typedapi/types/lifecycle.go +++ b/typedapi/types/lifecycle.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Lifecycle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/get_lifecycle/types.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/get_lifecycle/types.ts#L24-L28 type Lifecycle struct { ModifiedDate DateTime `json:"modified_date"` Policy IlmPolicy `json:"policy"` Version int64 `json:"version"` } +func (s *Lifecycle) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "modified_date": + if err := dec.Decode(&s.ModifiedDate); err != nil { + return err + } + + case "policy": + if err := dec.Decode(&s.Policy); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLifecycle returns a Lifecycle. func NewLifecycle() *Lifecycle { r := &Lifecycle{} diff --git a/typedapi/types/lifecycleexplain.go b/typedapi/types/lifecycleexplain.go old mode 100755 new mode 100644 index f7796113d4..354f1feb06 --- a/typedapi/types/lifecycleexplain.go +++ b/typedapi/types/lifecycleexplain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // LifecycleExplainManaged // LifecycleExplainUnmanaged // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/explain_lifecycle/types.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/explain_lifecycle/types.ts#L59-L62 type LifecycleExplain interface{} diff --git a/typedapi/types/lifecycleexplainmanaged.go b/typedapi/types/lifecycleexplainmanaged.go old mode 100755 new mode 100644 index 48ab564d72..bc23ef7600 --- a/typedapi/types/lifecycleexplainmanaged.go +++ b/typedapi/types/lifecycleexplainmanaged.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // LifecycleExplainManaged type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/explain_lifecycle/types.ts#L26-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/explain_lifecycle/types.ts#L26-L52 type LifecycleExplainManaged struct { Action *string `json:"action,omitempty"` ActionTime DateTime `json:"action_time,omitempty"` @@ -53,6 +59,164 @@ type LifecycleExplainManaged struct { TimeSinceIndexCreation Duration `json:"time_since_index_creation,omitempty"` } +func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action": + if err := dec.Decode(&s.Action); err != nil { + return err + } + + case "action_time": + if err := dec.Decode(&s.ActionTime); err != nil { + return err + } + + case "action_time_millis": + if err := dec.Decode(&s.ActionTimeMillis); err != nil { + return err + } + + case "age": + if err := dec.Decode(&s.Age); err != nil { + return err + } + + case "failed_step": + if err := dec.Decode(&s.FailedStep); err != nil { + return err + } + + case "failed_step_retry_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FailedStepRetryCount = &value + case float64: + f := int(v) + s.FailedStepRetryCount = &f + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "index_creation_date": + if err := dec.Decode(&s.IndexCreationDate); err != nil { + return err + } + + case "index_creation_date_millis": + if err := dec.Decode(&s.IndexCreationDateMillis); err != nil { + return err + } + + case "is_auto_retryable_error": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsAutoRetryableError = &value + case bool: + s.IsAutoRetryableError = &v + } + + case "lifecycle_date": + if err := dec.Decode(&s.LifecycleDate); err != nil { + return err + } + + case "lifecycle_date_millis": + if err := dec.Decode(&s.LifecycleDateMillis); err != nil { + return err + } + + case "managed": + if err := dec.Decode(&s.Managed); err != nil { + return err + } + + case "phase": + if err := dec.Decode(&s.Phase); err != nil { + return err + } + + case "phase_execution": + if err := dec.Decode(&s.PhaseExecution); err != nil { + return err + } + + case "phase_time": + if err := dec.Decode(&s.PhaseTime); err != nil { + return err + } + + case "phase_time_millis": + if err := dec.Decode(&s.PhaseTimeMillis); err != nil { + return err + } + + case "policy": + if err := dec.Decode(&s.Policy); err != nil { + return err + } + + case "step": + if err := dec.Decode(&s.Step); err != nil { + return err + } + + case "step_info": + if s.StepInfo == nil { + s.StepInfo = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.StepInfo); err != nil { + return err + } + + case "step_time": + if err := dec.Decode(&s.StepTime); err != nil { + return err + } + + case "step_time_millis": + if err := dec.Decode(&s.StepTimeMillis); err != nil { + return err + } + + case "time_since_index_creation": + if err := dec.Decode(&s.TimeSinceIndexCreation); err != nil { + return err + } + + } + } + return nil +} + // NewLifecycleExplainManaged returns a 
LifecycleExplainManaged. func NewLifecycleExplainManaged() *LifecycleExplainManaged { r := &LifecycleExplainManaged{ diff --git a/typedapi/types/lifecycleexplainphaseexecution.go b/typedapi/types/lifecycleexplainphaseexecution.go old mode 100755 new mode 100644 index db52e2575f..5a7a8bea47 --- a/typedapi/types/lifecycleexplainphaseexecution.go +++ b/typedapi/types/lifecycleexplainphaseexecution.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // LifecycleExplainPhaseExecution type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/explain_lifecycle/types.ts#L64-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/explain_lifecycle/types.ts#L64-L68 type LifecycleExplainPhaseExecution struct { ModifiedDateInMillis int64 `json:"modified_date_in_millis"` Policy string `json:"policy"` Version int64 `json:"version"` } +func (s *LifecycleExplainPhaseExecution) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "modified_date_in_millis": + if err := dec.Decode(&s.ModifiedDateInMillis); err != nil { + return err + } + + case "policy": + if err := dec.Decode(&s.Policy); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLifecycleExplainPhaseExecution returns a LifecycleExplainPhaseExecution. func NewLifecycleExplainPhaseExecution() *LifecycleExplainPhaseExecution { r := &LifecycleExplainPhaseExecution{} diff --git a/typedapi/types/lifecycleexplainunmanaged.go b/typedapi/types/lifecycleexplainunmanaged.go old mode 100755 new mode 100644 index 7369785e8a..0365d49818 --- a/typedapi/types/lifecycleexplainunmanaged.go +++ b/typedapi/types/lifecycleexplainunmanaged.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // LifecycleExplainUnmanaged type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/explain_lifecycle/types.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/explain_lifecycle/types.ts#L54-L57 type LifecycleExplainUnmanaged struct { Index string `json:"index"` Managed bool `json:"managed,omitempty"` } +func (s *LifecycleExplainUnmanaged) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "managed": + if err := dec.Decode(&s.Managed); err != nil { + return err + } + + } + } + return nil +} + // NewLifecycleExplainUnmanaged returns a LifecycleExplainUnmanaged. func NewLifecycleExplainUnmanaged() *LifecycleExplainUnmanaged { r := &LifecycleExplainUnmanaged{} diff --git a/typedapi/types/like.go b/typedapi/types/like.go old mode 100755 new mode 100644 index b306839b42..f3f8398f1c --- a/typedapi/types/like.go +++ b/typedapi/types/like.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // LikeDocument // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L103-L108 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L103-L108 type Like interface{} diff --git a/typedapi/types/likedocument.go b/typedapi/types/likedocument.go old mode 100755 new mode 100644 index 77807c0d72..7479acbc78 --- a/typedapi/types/likedocument.go +++ b/typedapi/types/likedocument.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // LikeDocument type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L91-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L91-L101 type LikeDocument struct { Doc json.RawMessage `json:"doc,omitempty"` Fields []string `json:"fields,omitempty"` @@ -40,6 +44,69 @@ type LikeDocument struct { VersionType *versiontype.VersionType `json:"version_type,omitempty"` } +func (s *LikeDocument) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc": + if err := dec.Decode(&s.Doc); err != nil { + return err + } + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "per_field_analyzer": + if s.PerFieldAnalyzer == nil { + s.PerFieldAnalyzer = make(map[string]string, 0) + } + if err := dec.Decode(&s.PerFieldAnalyzer); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "version_type": + if err := dec.Decode(&s.VersionType); err != nil { + return err + } + + } + } + return nil +} + // NewLikeDocument returns a LikeDocument. func NewLikeDocument() *LikeDocument { r := &LikeDocument{ diff --git a/typedapi/types/limits.go b/typedapi/types/limits.go old mode 100755 new mode 100644 index 61066d0ebb..0a1f3fdf2f --- a/typedapi/types/limits.go +++ b/typedapi/types/limits.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Limits type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/info/types.ts#L34-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/info/types.ts#L34-L38 type Limits struct { EffectiveMaxModelMemoryLimit string `json:"effective_max_model_memory_limit"` MaxModelMemoryLimit *string `json:"max_model_memory_limit,omitempty"` diff --git a/typedapi/types/limittokencounttokenfilter.go b/typedapi/types/limittokencounttokenfilter.go old mode 100755 new mode 100644 index dc66dbaa4f..63fae1b28b --- a/typedapi/types/limittokencounttokenfilter.go +++ b/typedapi/types/limittokencounttokenfilter.go @@ -16,18 +16,77 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // LimitTokenCountTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L248-L252 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L249-L253 type LimitTokenCountTokenFilter struct { - ConsumeAllTokens *bool `json:"consume_all_tokens,omitempty"` - MaxTokenCount *int `json:"max_token_count,omitempty"` - Type string `json:"type,omitempty"` - Version *string `json:"version,omitempty"` + ConsumeAllTokens *bool `json:"consume_all_tokens,omitempty"` + MaxTokenCount Stringifiedinteger `json:"max_token_count,omitempty"` + Type string `json:"type,omitempty"` + Version *string `json:"version,omitempty"` +} + +func (s *LimitTokenCountTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "consume_all_tokens": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ConsumeAllTokens = &value + case bool: + s.ConsumeAllTokens = &v + } + + case "max_token_count": + if err := dec.Decode(&s.MaxTokenCount); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil } // NewLimitTokenCountTokenFilter returns a LimitTokenCountTokenFilter. diff --git a/typedapi/types/linearinterpolationsmoothingmodel.go b/typedapi/types/linearinterpolationsmoothingmodel.go old mode 100755 new mode 100644 index 94903640cc..794d993c14 --- a/typedapi/types/linearinterpolationsmoothingmodel.go +++ b/typedapi/types/linearinterpolationsmoothingmodel.go @@ -16,19 +16,97 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // LinearInterpolationSmoothingModel type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L216-L220 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L220-L224 type LinearInterpolationSmoothingModel struct { BigramLambda Float64 `json:"bigram_lambda"` TrigramLambda Float64 `json:"trigram_lambda"` UnigramLambda Float64 `json:"unigram_lambda"` } +func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bigram_lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.BigramLambda = f + case float64: + f := Float64(v) + s.BigramLambda = f + } + + case "trigram_lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.TrigramLambda = f + case float64: + f := Float64(v) + s.TrigramLambda = f + } + + case "unigram_lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.UnigramLambda = f + case float64: + f := Float64(v) + s.UnigramLambda = f + } + + } + } + return nil +} + // NewLinearInterpolationSmoothingModel returns a LinearInterpolationSmoothingModel. func NewLinearInterpolationSmoothingModel() *LinearInterpolationSmoothingModel { r := &LinearInterpolationSmoothingModel{} diff --git a/typedapi/types/linearmovingaverageaggregation.go b/typedapi/types/linearmovingaverageaggregation.go old mode 100755 new mode 100644 index 5314c2d84a..c3141e5179 --- a/typedapi/types/linearmovingaverageaggregation.go +++ b/typedapi/types/linearmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,27 +27,30 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // LinearMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L202-L205 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L202-L205 type LinearMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Minimize *bool `json:"minimize,omitempty"` - Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` - Predict *int `json:"predict,omitempty"` - Settings EmptyObject `json:"settings"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Minimize *bool `json:"minimize,omitempty"` + Model string `json:"model,omitempty"` + Name *string `json:"name,omitempty"` + Predict *int `json:"predict,omitempty"` + Settings EmptyObject `json:"settings"` + Window *int `json:"window,omitempty"` } func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,9 +70,12 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -82,8 +88,17 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "minimize": - if err := dec.Decode(&s.Minimize); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Minimize = &value + case bool: + s.Minimize = &v } case "model": @@ -92,13 +107,27 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "predict": - if err := dec.Decode(&s.Predict); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Predict = &value + case float64: + f := int(v) + s.Predict = &f } case "settings": @@ -107,8 +136,19 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/loggingaction.go b/typedapi/types/loggingaction.go old mode 100755 new mode 100644 index 2ad62e5e9b..d9e8d86cab --- a/typedapi/types/loggingaction.go +++ b/typedapi/types/loggingaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // LoggingAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L281-L285 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L281-L285 type LoggingAction struct { Category *string `json:"category,omitempty"` Level *string `json:"level,omitempty"` diff --git a/typedapi/types/loggingresult.go b/typedapi/types/loggingresult.go old mode 100755 new mode 100644 index 960c7fdc7f..b9964dfe2c --- a/typedapi/types/loggingresult.go +++ b/typedapi/types/loggingresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // LoggingResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L287-L289 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L287-L289 type LoggingResult struct { LoggedText string `json:"logged_text"` } diff --git a/typedapi/types/logstashpipeline.go b/typedapi/types/logstashpipeline.go old mode 100755 new mode 100644 index a8b10215ed..ce088cbb5b --- a/typedapi/types/logstashpipeline.go +++ b/typedapi/types/logstashpipeline.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // LogstashPipeline type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/logstash/_types/Pipeline.ts#L37-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/logstash/_types/Pipeline.ts#L37-L44 type LogstashPipeline struct { Description string `json:"description"` LastModified DateTime `json:"last_modified"` @@ -32,6 +40,65 @@ type LogstashPipeline struct { Username string `json:"username"` } +func (s *LogstashPipeline) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "last_modified": + if err := dec.Decode(&s.LastModified); err != nil { + return err + } + + case "pipeline": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pipeline = o + + case "pipeline_metadata": + if err := dec.Decode(&s.PipelineMetadata); err != nil { + return err + } + + case "pipeline_settings": + if err := dec.Decode(&s.PipelineSettings); err != nil { + return err + } + + case "username": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Username = o + + } + } + return nil +} + // NewLogstashPipeline returns a LogstashPipeline. func NewLogstashPipeline() *LogstashPipeline { r := &LogstashPipeline{} diff --git a/typedapi/types/longnumberproperty.go b/typedapi/types/longnumberproperty.go old mode 100755 new mode 100644 index 122c75642f..82fa30672d --- a/typedapi/types/longnumberproperty.go +++ b/typedapi/types/longnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // LongNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L151-L154 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L151-L154 type LongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type LongNumberProperty struct { } func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,35 +435,77 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := 
dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NullValue = &value + case float64: + f := int64(v) + s.NullValue = &f } case "on_script_error": @@ -426,6 +514,9 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +804,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +816,39 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/longrangeproperty.go b/typedapi/types/longrangeproperty.go old mode 100755 new mode 100644 index 8397458360..43cdca288a --- a/typedapi/types/longrangeproperty.go +++ b/typedapi/types/longrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // LongRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/range.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/range.ts#L50-L52 type LongRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -51,6 +53,7 @@ type LongRangeProperty struct { } func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,23 +68,63 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -90,6 +133,9 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -377,28 +423,54 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +758,32 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = 
oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/longraretermsaggregate.go b/typedapi/types/longraretermsaggregate.go old mode 100755 new mode 100644 index 303e919859..88bcca8943 --- a/typedapi/types/longraretermsaggregate.go +++ b/typedapi/types/longraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // LongRareTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L430-L435 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L431-L436 type LongRareTermsAggregate struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *LongRareTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *LongRareTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]LongRareTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []LongRareTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/longraretermsbucket.go b/typedapi/types/longraretermsbucket.go old mode 100755 new mode 100644 index 6581987a4b..1f5cc0da02 --- a/typedapi/types/longraretermsbucket.go +++ b/typedapi/types/longraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // LongRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L437-L440 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L438-L441 type LongRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -43,6 +45,7 @@ type LongRareTermsBucket struct { } func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -56,462 +59,556 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := 
NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": 
- o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Key = value + case float64: + f := int64(v) + s.Key = f } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != 
nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } 
+ s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := 
NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -537,6 +634,7 @@ func (s LongRareTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/longtermsaggregate.go b/typedapi/types/longtermsaggregate.go old mode 100755 new mode 100644 index ae86e3b912..9db14c3f53 --- a/typedapi/types/longtermsaggregate.go +++ b/typedapi/types/longtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // LongTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L398-L403 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L399-L404 type LongTermsAggregate struct { - Buckets BucketsLongTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsLongTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]LongTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []LongTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/longtermsbucket.go b/typedapi/types/longtermsbucket.go old mode 100755 new mode 100644 index be0e06f47f..42a9f55e57 --- a/typedapi/types/longtermsbucket.go +++ b/typedapi/types/longtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // LongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L405-L408 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L406-L409 type LongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -44,6 +46,7 @@ type LongTermsBucket struct { } func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,467 +60,571 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - 
return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); 
err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "doc_count_error": - if err := dec.Decode(&s.DocCountError); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountError = &value + case float64: + f := int64(v) + s.DocCountError = &f } case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Key = value + case float64: + f := int64(v) + s.Key = f } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != 
nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return 
err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := 
NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -543,6 +650,7 @@ func (s LongTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/lowercasenormalizer.go b/typedapi/types/lowercasenormalizer.go old mode 100755 new mode 100644 index 288ca1fe94..1c76477b66 --- a/typedapi/types/lowercasenormalizer.go +++ b/typedapi/types/lowercasenormalizer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // LowercaseNormalizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/normalizers.ts#L26-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/normalizers.ts#L26-L28 type LowercaseNormalizer struct { Type string `json:"type,omitempty"` } diff --git a/typedapi/types/lowercaseprocessor.go b/typedapi/types/lowercaseprocessor.go old mode 100755 new mode 100644 index bbc1907df3..ac7be2b1be --- a/typedapi/types/lowercaseprocessor.go +++ b/typedapi/types/lowercaseprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // LowercaseProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L300-L304 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L300-L304 type LowercaseProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type LowercaseProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewLowercaseProcessor returns a LowercaseProcessor. func NewLowercaseProcessor() *LowercaseProcessor { r := &LowercaseProcessor{} diff --git a/typedapi/types/lowercasetokenfilter.go b/typedapi/types/lowercasetokenfilter.go old mode 100755 new mode 100644 index da3083932a..933f220208 --- a/typedapi/types/lowercasetokenfilter.go +++ b/typedapi/types/lowercasetokenfilter.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // LowercaseTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L254-L257 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L255-L258 type LowercaseTokenFilter struct { Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *LowercaseTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "language": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Language = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLowercaseTokenFilter returns a LowercaseTokenFilter. func NewLowercaseTokenFilter() *LowercaseTokenFilter { r := &LowercaseTokenFilter{} diff --git a/typedapi/types/lowercasetokenizer.go b/typedapi/types/lowercasetokenizer.go old mode 100755 new mode 100644 index 9c94540ce3..ad8507cece --- a/typedapi/types/lowercasetokenizer.go +++ b/typedapi/types/lowercasetokenizer.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // LowercaseTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L70-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L70-L72 type LowercaseTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *LowercaseTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewLowercaseTokenizer returns a LowercaseTokenizer. func NewLowercaseTokenizer() *LowercaseTokenizer { r := &LowercaseTokenizer{} diff --git a/typedapi/types/machinelearning.go b/typedapi/types/machinelearning.go old mode 100755 new mode 100644 index b8b27c6d14..a68e493a2a --- a/typedapi/types/machinelearning.go +++ b/typedapi/types/machinelearning.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MachineLearning type. 
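The decoders above coerce boolean-ish fields (ignore_failure, ignore_missing, enabled, available, and similar flags) that may arrive either as JSON booleans or as the strings "true"/"false", falling back to strconv.ParseBool. A minimal sketch of that coercion, using a hypothetical lenientBool helper rather than anything from the library:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// lenientBool accepts either a JSON boolean or a quoted "true"/"false"/"1"/"0"
// string, mirroring the coercion the generated UnmarshalJSON methods apply.
func lenientBool(raw json.RawMessage) (bool, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return false, err
	}
	switch v := tmp.(type) {
	case bool:
		return v, nil
	case string:
		return strconv.ParseBool(v)
	default:
		return false, fmt.Errorf("cannot coerce %T to bool", tmp)
	}
}

func main() {
	for _, in := range []string{`true`, `"false"`, `"1"`} {
		v, err := lenientBool(json.RawMessage(in))
		fmt.Printf("%-8s -> %v (err=%v)\n", in, v, err)
	}
}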
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L363-L370 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L363-L370 type MachineLearning struct { Available bool `json:"available"` DataFrameAnalyticsJobs MlDataFrameAnalyticsJobs `json:"data_frame_analytics_jobs"` @@ -35,6 +45,96 @@ type MachineLearning struct { NodeCount int `json:"node_count"` } +func (s *MachineLearning) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "data_frame_analytics_jobs": + if err := dec.Decode(&s.DataFrameAnalyticsJobs); err != nil { + return err + } + + case "datafeeds": + if s.Datafeeds == nil { + s.Datafeeds = make(map[string]XpackDatafeed, 0) + } + if err := dec.Decode(&s.Datafeeds); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "inference": + if err := dec.Decode(&s.Inference); err != nil { + return err + } + + case "jobs": + if s.Jobs == nil { + s.Jobs = make(map[string]JobUsage, 0) + } + if err := dec.Decode(&s.Jobs); err != nil { + return err + } + + case "node_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NodeCount = value + case float64: + f := int(v) + s.NodeCount = f + } + + } + } + return nil +} + // NewMachineLearning returns a MachineLearning. func NewMachineLearning() *MachineLearning { r := &MachineLearning{ diff --git a/typedapi/types/manageuserprivileges.go b/typedapi/types/manageuserprivileges.go old mode 100755 new mode 100644 index 4430127e18..950fc8806a --- a/typedapi/types/manageuserprivileges.go +++ b/typedapi/types/manageuserprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ManageUserPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L195-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L197-L199 type ManageUserPrivileges struct { Applications []string `json:"applications"` } diff --git a/typedapi/types/mapboxvectortiles.go b/typedapi/types/mapboxvectortiles.go old mode 100755 new mode 100644 index 3876e3cb7a..276e887e25 --- a/typedapi/types/mapboxvectortiles.go +++ b/typedapi/types/mapboxvectortiles.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MapboxVectorTiles type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Binary.ts#L21-L21 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Binary.ts#L21-L21 type MapboxVectorTiles []byte diff --git a/typedapi/types/mappingcharfilter.go b/typedapi/types/mappingcharfilter.go old mode 100755 new mode 100644 index f11a3a13b5..bc93c97e23 --- a/typedapi/types/mappingcharfilter.go +++ b/typedapi/types/mappingcharfilter.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MappingCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/char_filters.ts#L47-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/char_filters.ts#L47-L51 type MappingCharFilter struct { Mappings []string `json:"mappings,omitempty"` MappingsPath *string `json:"mappings_path,omitempty"` @@ -30,6 +38,49 @@ type MappingCharFilter struct { Version *string `json:"version,omitempty"` } +func (s *MappingCharFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "mappings_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MappingsPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewMappingCharFilter returns a MappingCharFilter. func NewMappingCharFilter() *MappingCharFilter { r := &MappingCharFilter{} diff --git a/typedapi/types/mappinglimitsettings.go b/typedapi/types/mappinglimitsettings.go old mode 100755 new mode 100644 index 4cb56bfdcc..b526617471 --- a/typedapi/types/mappinglimitsettings.go +++ b/typedapi/types/mappinglimitsettings.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettings type. 
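MappingCharFilter above corresponds to the analysis "mapping" char filter, which takes either an inline list of "from => to" rules or a mappings_path. A sketch of the settings JSON such a filter produces, using a local illustrative struct (the field names follow the generated type, but this is not the library's request builder):

package main

import (
	"encoding/json"
	"fmt"
)

// mappingCharFilter mirrors the JSON shape of the MappingCharFilter type above:
// inline "from => to" rules or a path to a rules file. Illustrative only.
type mappingCharFilter struct {
	Type         string   `json:"type"`
	Mappings     []string `json:"mappings,omitempty"`
	MappingsPath *string  `json:"mappings_path,omitempty"`
}

func main() {
	filter := mappingCharFilter{
		Type:     "mapping",
		Mappings: []string{":) => _happy_", ":( => _sad_"},
	}
	// Embed the filter in an index-settings body under analysis.char_filter.
	body := map[string]interface{}{
		"settings": map[string]interface{}{
			"analysis": map[string]interface{}{
				"char_filter": map[string]interface{}{
					"emoticons": filter,
				},
			},
		},
	}
	out, err := json.MarshalIndent(body, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}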
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L402-L415 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L402-L415 type MappingLimitSettings struct { Coerce *bool `json:"coerce,omitempty"` Depth *MappingLimitSettingsDepth `json:"depth,omitempty"` @@ -34,6 +44,84 @@ type MappingLimitSettings struct { TotalFields *MappingLimitSettingsTotalFields `json:"total_fields,omitempty"` } +func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "coerce": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v + } + + case "depth": + if err := dec.Decode(&s.Depth); err != nil { + return err + } + + case "dimension_fields": + if err := dec.Decode(&s.DimensionFields); err != nil { + return err + } + + case "field_name_length": + if err := dec.Decode(&s.FieldNameLength); err != nil { + return err + } + + case "ignore_malformed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v + } + + case "nested_fields": + if err := dec.Decode(&s.NestedFields); err != nil { + return err + } + + case "nested_objects": + if err := dec.Decode(&s.NestedObjects); err != nil { + return err + } + + case "total_fields": + if err := dec.Decode(&s.TotalFields); err != nil { + return err + } + + } + } + return nil +} + // NewMappingLimitSettings returns a MappingLimitSettings. func NewMappingLimitSettings() *MappingLimitSettings { r := &MappingLimitSettings{} diff --git a/typedapi/types/mappinglimitsettingsdepth.go b/typedapi/types/mappinglimitsettingsdepth.go old mode 100755 new mode 100644 index dc09731761..6bc172f3a2 --- a/typedapi/types/mappinglimitsettingsdepth.go +++ b/typedapi/types/mappinglimitsettingsdepth.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettingsDepth type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L427-L434 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L427-L434 type MappingLimitSettingsDepth struct { // Limit The maximum depth for a field, which is measured as the number of inner // objects. 
For instance, if all fields are defined @@ -31,6 +41,42 @@ type MappingLimitSettingsDepth struct { Limit *int `json:"limit,omitempty"` } +func (s *MappingLimitSettingsDepth) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int(v) + s.Limit = &f + } + + } + } + return nil +} + // NewMappingLimitSettingsDepth returns a MappingLimitSettingsDepth. func NewMappingLimitSettingsDepth() *MappingLimitSettingsDepth { r := &MappingLimitSettingsDepth{} diff --git a/typedapi/types/mappinglimitsettingsdimensionfields.go b/typedapi/types/mappinglimitsettingsdimensionfields.go old mode 100755 new mode 100644 index 078556eeb4..77d6be39ab --- a/typedapi/types/mappinglimitsettingsdimensionfields.go +++ b/typedapi/types/mappinglimitsettingsdimensionfields.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettingsDimensionFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L464-L470 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L464-L470 type MappingLimitSettingsDimensionFields struct { // Limit [preview] This functionality is in technical preview and may be changed or // removed in a future release. Elastic will @@ -31,6 +41,42 @@ type MappingLimitSettingsDimensionFields struct { Limit *int `json:"limit,omitempty"` } +func (s *MappingLimitSettingsDimensionFields) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int(v) + s.Limit = &f + } + + } + } + return nil +} + // NewMappingLimitSettingsDimensionFields returns a MappingLimitSettingsDimensionFields. func NewMappingLimitSettingsDimensionFields() *MappingLimitSettingsDimensionFields { r := &MappingLimitSettingsDimensionFields{} diff --git a/typedapi/types/mappinglimitsettingsfieldnamelength.go b/typedapi/types/mappinglimitsettingsfieldnamelength.go old mode 100755 new mode 100644 index abf2e4578c..fe431ae069 --- a/typedapi/types/mappinglimitsettingsfieldnamelength.go +++ b/typedapi/types/mappinglimitsettingsfieldnamelength.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettingsFieldNameLength type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L455-L462 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L455-L462 type MappingLimitSettingsFieldNameLength struct { // Limit Setting for the maximum length of a field name. This setting isn’t really // something that addresses mappings explosion but @@ -33,6 +43,41 @@ type MappingLimitSettingsFieldNameLength struct { Limit *int64 `json:"limit,omitempty"` } +func (s *MappingLimitSettingsFieldNameLength) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int64(v) + s.Limit = &f + } + + } + } + return nil +} + // NewMappingLimitSettingsFieldNameLength returns a MappingLimitSettingsFieldNameLength. func NewMappingLimitSettingsFieldNameLength() *MappingLimitSettingsFieldNameLength { r := &MappingLimitSettingsFieldNameLength{} diff --git a/typedapi/types/mappinglimitsettingsnestedfields.go b/typedapi/types/mappinglimitsettingsnestedfields.go old mode 100755 new mode 100644 index 2e13e18e34..efc1dd06ae --- a/typedapi/types/mappinglimitsettingsnestedfields.go +++ b/typedapi/types/mappinglimitsettingsnestedfields.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettingsNestedFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L436-L444 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L436-L444 type MappingLimitSettingsNestedFields struct { // Limit The maximum number of distinct nested mappings in an index. 
The nested type // should only be used in special cases, when @@ -32,6 +42,42 @@ type MappingLimitSettingsNestedFields struct { Limit *int `json:"limit,omitempty"` } +func (s *MappingLimitSettingsNestedFields) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int(v) + s.Limit = &f + } + + } + } + return nil +} + // NewMappingLimitSettingsNestedFields returns a MappingLimitSettingsNestedFields. func NewMappingLimitSettingsNestedFields() *MappingLimitSettingsNestedFields { r := &MappingLimitSettingsNestedFields{} diff --git a/typedapi/types/mappinglimitsettingsnestedobjects.go b/typedapi/types/mappinglimitsettingsnestedobjects.go old mode 100755 new mode 100644 index bce1cbe600..9362565be0 --- a/typedapi/types/mappinglimitsettingsnestedobjects.go +++ b/typedapi/types/mappinglimitsettingsnestedobjects.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettingsNestedObjects type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L446-L453 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L446-L453 type MappingLimitSettingsNestedObjects struct { // Limit The maximum number of nested JSON objects that a single document can contain // across all nested types. This limit helps @@ -31,6 +41,42 @@ type MappingLimitSettingsNestedObjects struct { Limit *int `json:"limit,omitempty"` } +func (s *MappingLimitSettingsNestedObjects) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int(v) + s.Limit = &f + } + + } + } + return nil +} + // NewMappingLimitSettingsNestedObjects returns a MappingLimitSettingsNestedObjects. func NewMappingLimitSettingsNestedObjects() *MappingLimitSettingsNestedObjects { r := &MappingLimitSettingsNestedObjects{} diff --git a/typedapi/types/mappinglimitsettingstotalfields.go b/typedapi/types/mappinglimitsettingstotalfields.go old mode 100755 new mode 100644 index 09f00846a0..94973ebc5d --- a/typedapi/types/mappinglimitsettingstotalfields.go +++ b/typedapi/types/mappinglimitsettingstotalfields.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingLimitSettingsTotalFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L417-L425 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L417-L425 type MappingLimitSettingsTotalFields struct { // Limit The maximum number of fields in an index. Field and object mappings, as well // as field aliases count towards this limit. @@ -33,6 +43,42 @@ type MappingLimitSettingsTotalFields struct { Limit *int `json:"limit,omitempty"` } +func (s *MappingLimitSettingsTotalFields) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Limit = &value + case float64: + f := int(v) + s.Limit = &f + } + + } + } + return nil +} + // NewMappingLimitSettingsTotalFields returns a MappingLimitSettingsTotalFields. func NewMappingLimitSettingsTotalFields() *MappingLimitSettingsTotalFields { r := &MappingLimitSettingsTotalFields{} diff --git a/typedapi/types/mappingstats.go b/typedapi/types/mappingstats.go old mode 100755 new mode 100644 index e007cc69f3..4f9bc834bd --- a/typedapi/types/mappingstats.go +++ b/typedapi/types/mappingstats.go @@ -16,19 +16,84 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MappingStats type. 
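Each of the MappingLimitSettings* decoders above accepts its limit as either a JSON number or a string, since the index settings APIs frequently return numeric settings in string form (for example "index.mapping.total_fields.limit": "1000"). A standalone sketch of that coercion with a local struct mirroring the total_fields shape; the type and decoder here are illustrative, not the generated ones:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// totalFieldsLimit mirrors the shape of MappingLimitSettingsTotalFields with a
// lenient decoder: the limit may be a JSON number or a quoted numeric string.
type totalFieldsLimit struct {
	Limit *int `json:"limit,omitempty"`
}

func (s *totalFieldsLimit) UnmarshalJSON(data []byte) error {
	var raw struct {
		Limit json.RawMessage `json:"limit"`
	}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if len(raw.Limit) == 0 {
		return nil // limit absent
	}
	var tmp interface{}
	if err := json.Unmarshal(raw.Limit, &tmp); err != nil {
		return err
	}
	switch v := tmp.(type) {
	case float64: // plain JSON number
		n := int(v)
		s.Limit = &n
	case string: // settings responses often stringify numbers
		n, err := strconv.Atoi(v)
		if err != nil {
			return err
		}
		s.Limit = &n
	}
	return nil
}

func main() {
	for _, in := range []string{`{"limit":1000}`, `{"limit":"2000"}`} {
		var l totalFieldsLimit
		if err := json.Unmarshal([]byte(in), &l); err != nil {
			panic(err)
		}
		fmt.Println(in, "->", *l.Limit)
	}
}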
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L177-L181 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L177-L181 type MappingStats struct { TotalCount int64 `json:"total_count"` TotalEstimatedOverhead ByteSize `json:"total_estimated_overhead,omitempty"` TotalEstimatedOverheadInBytes int64 `json:"total_estimated_overhead_in_bytes"` } +func (s *MappingStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "total_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalCount = value + case float64: + f := int64(v) + s.TotalCount = f + } + + case "total_estimated_overhead": + if err := dec.Decode(&s.TotalEstimatedOverhead); err != nil { + return err + } + + case "total_estimated_overhead_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalEstimatedOverheadInBytes = value + case float64: + f := int64(v) + s.TotalEstimatedOverheadInBytes = f + } + + } + } + return nil +} + // NewMappingStats returns a MappingStats. func NewMappingStats() *MappingStats { r := &MappingStats{} diff --git a/typedapi/types/masterisstableindicator.go b/typedapi/types/masterisstableindicator.go new file mode 100644 index 0000000000..0111d337d3 --- /dev/null +++ b/typedapi/types/masterisstableindicator.go @@ -0,0 +1,43 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// MasterIsStableIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L78-L82 +type MasterIsStableIndicator struct { + Details *MasterIsStableIndicatorDetails `json:"details,omitempty"` + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewMasterIsStableIndicator returns a MasterIsStableIndicator. 
+func NewMasterIsStableIndicator() *MasterIsStableIndicator { + r := &MasterIsStableIndicator{} + + return r +} diff --git a/typedapi/types/masterisstableindicatorclusterformationnode.go b/typedapi/types/masterisstableindicatorclusterformationnode.go new file mode 100644 index 0000000000..db21c392cb --- /dev/null +++ b/typedapi/types/masterisstableindicatorclusterformationnode.go @@ -0,0 +1,37 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +// MasterIsStableIndicatorClusterFormationNode type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L97-L101 +type MasterIsStableIndicatorClusterFormationNode struct { + ClusterFormationMessage string `json:"cluster_formation_message"` + Name *string `json:"name,omitempty"` + NodeId string `json:"node_id"` +} + +// NewMasterIsStableIndicatorClusterFormationNode returns a MasterIsStableIndicatorClusterFormationNode. +func NewMasterIsStableIndicatorClusterFormationNode() *MasterIsStableIndicatorClusterFormationNode { + r := &MasterIsStableIndicatorClusterFormationNode{} + + return r +} diff --git a/typedapi/types/masterisstableindicatordetails.go b/typedapi/types/masterisstableindicatordetails.go new file mode 100644 index 0000000000..5565ca3603 --- /dev/null +++ b/typedapi/types/masterisstableindicatordetails.go @@ -0,0 +1,38 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +// MasterIsStableIndicatorDetails type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L83-L88 +type MasterIsStableIndicatorDetails struct { + ClusterFormation []MasterIsStableIndicatorClusterFormationNode `json:"cluster_formation,omitempty"` + CurrentMaster IndicatorNode `json:"current_master"` + ExceptionFetchingHistory *MasterIsStableIndicatorExceptionFetchingHistory `json:"exception_fetching_history,omitempty"` + RecentMasters []IndicatorNode `json:"recent_masters"` +} + +// NewMasterIsStableIndicatorDetails returns a MasterIsStableIndicatorDetails. +func NewMasterIsStableIndicatorDetails() *MasterIsStableIndicatorDetails { + r := &MasterIsStableIndicatorDetails{} + + return r +} diff --git a/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go b/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go new file mode 100644 index 0000000000..9d678bfe51 --- /dev/null +++ b/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go @@ -0,0 +1,36 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +// MasterIsStableIndicatorExceptionFetchingHistory type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L93-L96 +type MasterIsStableIndicatorExceptionFetchingHistory struct { + Message string `json:"message"` + StackTrace string `json:"stack_trace"` +} + +// NewMasterIsStableIndicatorExceptionFetchingHistory returns a MasterIsStableIndicatorExceptionFetchingHistory. +func NewMasterIsStableIndicatorExceptionFetchingHistory() *MasterIsStableIndicatorExceptionFetchingHistory { + r := &MasterIsStableIndicatorExceptionFetchingHistory{} + + return r +} diff --git a/typedapi/types/masterrecord.go b/typedapi/types/masterrecord.go old mode 100755 new mode 100644 index ec7ab1e36a..323c07fede --- a/typedapi/types/masterrecord.go +++ b/typedapi/types/masterrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MasterRecord type. 
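The new MasterIsStableIndicator* types model the master_is_stable section of the health report API added in this release. A hedged sketch that decodes a trimmed, illustrative payload (not captured from a real cluster) into local structs reusing the JSON tags shown above; the generated types additionally carry diagnosis, impacts, and an enum-typed status:

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of a subset of the generated health-report types; tags follow
// the structs above, but these are illustrations, not the library types.
type clusterFormationNode struct {
	ClusterFormationMessage string  `json:"cluster_formation_message"`
	Name                    *string `json:"name,omitempty"`
	NodeId                  string  `json:"node_id"`
}

type masterIsStableDetails struct {
	ClusterFormation []clusterFormationNode `json:"cluster_formation,omitempty"`
}

type masterIsStableIndicator struct {
	Status  string                 `json:"status"` // the generated type uses an enum here
	Symptom string                 `json:"symptom"`
	Details *masterIsStableDetails `json:"details,omitempty"`
}

func main() {
	body := []byte(`{
	  "status": "green",
	  "symptom": "The elected master node is stable",
	  "details": {"cluster_formation": []}
	}`)

	var ind masterIsStableIndicator
	if err := json.Unmarshal(body, &ind); err != nil {
		panic(err)
	}
	fmt.Println(ind.Status, "-", ind.Symptom)
}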
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/master/types.ts#L20-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/master/types.ts#L20-L39 type MasterRecord struct { // Host host name Host *string `json:"host,omitempty"` diff --git a/typedapi/types/matchallquery.go b/typedapi/types/matchallquery.go old mode 100755 new mode 100644 index b2c914039a..1da84c6a59 --- a/typedapi/types/matchallquery.go +++ b/typedapi/types/matchallquery.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MatchAllQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/MatchAllQuery.ts#L22-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/MatchAllQuery.ts#L22-L22 type MatchAllQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` } +func (s *MatchAllQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewMatchAllQuery returns a MatchAllQuery. func NewMatchAllQuery() *MatchAllQuery { r := &MatchAllQuery{} diff --git a/typedapi/types/matchboolprefixquery.go b/typedapi/types/matchboolprefixquery.go old mode 100755 new mode 100644 index da97a28cf3..de7feaea76 --- a/typedapi/types/matchboolprefixquery.go +++ b/typedapi/types/matchboolprefixquery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/operator" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MatchBoolPrefixQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L160-L171 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L160-L171 type MatchBoolPrefixQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -41,6 +49,137 @@ type MatchBoolPrefixQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Query) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "fuzzy_rewrite": + if err := dec.Decode(&s.FuzzyRewrite); err != nil { + return err + } + + case "fuzzy_transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FuzzyTranspositions = &value + case bool: + s.FuzzyTranspositions = &v + } + + case "max_expansions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxExpansions = &value + case float64: + f := int(v) + s.MaxExpansions = &f + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "operator": + if err := dec.Decode(&s.Operator); err != nil { + return err + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewMatchBoolPrefixQuery returns a MatchBoolPrefixQuery. func NewMatchBoolPrefixQuery() *MatchBoolPrefixQuery { r := &MatchBoolPrefixQuery{} diff --git a/typedapi/types/matchnonequery.go b/typedapi/types/matchnonequery.go old mode 100755 new mode 100644 index ee3c25235d..9ce1b56c2c --- a/typedapi/types/matchnonequery.go +++ b/typedapi/types/matchnonequery.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MatchNoneQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/MatchNoneQuery.ts#L22-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/MatchNoneQuery.ts#L22-L22 type MatchNoneQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` } +func (s *MatchNoneQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewMatchNoneQuery returns a MatchNoneQuery. func NewMatchNoneQuery() *MatchNoneQuery { r := &MatchNoneQuery{} diff --git a/typedapi/types/matchonlytextproperty.go b/typedapi/types/matchonlytextproperty.go old mode 100755 new mode 100644 index 05559b8488..54d7443ff2 --- a/typedapi/types/matchonlytextproperty.go +++ b/typedapi/types/matchonlytextproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,7 +30,7 @@ import ( // MatchOnlyTextProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L208-L233 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L208-L233 type MatchOnlyTextProperty struct { // CopyTo Allows you to copy the values of multiple fields into a group // field, which can then be queried as a single field. 
@@ -46,6 +46,7 @@ type MatchOnlyTextProperty struct { } func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -60,11 +61,25 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -352,13 +367,16 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } diff --git a/typedapi/types/matchphraseprefixquery.go b/typedapi/types/matchphraseprefixquery.go old mode 100755 new mode 100644 index 41f3999dc4..11f520d4d8 --- a/typedapi/types/matchphraseprefixquery.go +++ b/typedapi/types/matchphraseprefixquery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/zerotermsquery" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MatchPhrasePrefixQuery type. 
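The copy_to change above normalizes a value that Elasticsearch accepts either as a single string or as an array of strings; the same pattern appears for fields lists elsewhere in this diff. A minimal sketch with a hypothetical stringOrSlice helper:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// stringOrSlice normalizes a JSON value that may be "a" or ["a","b"] into a
// []string, mirroring the copy_to / fields handling in the generated decoders.
func stringOrSlice(raw json.RawMessage) ([]string, error) {
	if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
		var single string
		if err := json.Unmarshal(raw, &single); err != nil {
			return nil, err
		}
		return []string{single}, nil
	}
	var many []string
	if err := json.Unmarshal(raw, &many); err != nil {
		return nil, err
	}
	return many, nil
}

func main() {
	for _, in := range []string{`"full_name"`, `["full_name","all_text"]`} {
		out, err := stringOrSlice(json.RawMessage(in))
		if err != nil {
			panic(err)
		}
		fmt.Println(in, "->", out)
	}
}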
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L182-L189 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L182-L189 type MatchPhrasePrefixQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -37,6 +45,108 @@ type MatchPhrasePrefixQuery struct { ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` } +func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Query) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "max_expansions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxExpansions = &value + case float64: + f := int(v) + s.MaxExpansions = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "slop": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Slop = &value + case float64: + f := int(v) + s.Slop = &f + } + + case "zero_terms_query": + if err := dec.Decode(&s.ZeroTermsQuery); err != nil { + return err + } + + } + } + return nil +} + // NewMatchPhrasePrefixQuery returns a MatchPhrasePrefixQuery. func NewMatchPhrasePrefixQuery() *MatchPhrasePrefixQuery { r := &MatchPhrasePrefixQuery{} diff --git a/typedapi/types/matchphrasequery.go b/typedapi/types/matchphrasequery.go old mode 100755 new mode 100644 index 4288fb5dd4..4682e31c51 --- a/typedapi/types/matchphrasequery.go +++ b/typedapi/types/matchphrasequery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/zerotermsquery" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MatchPhraseQuery type. 
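The bytes.HasPrefix guard added to these query decoders supports Elasticsearch's shorthand form, where {"match_phrase": {"message": "quick fox"}} is equivalent to spelling the value out as an object with a "query" key. A sketch of that either/or decoding with a local stand-in struct (not the generated MatchPhraseQuery):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// phraseQuery is a local stand-in for a match_phrase body: when the JSON value
// is a bare string rather than an object, it is treated as the query text.
type phraseQuery struct {
	Query string `json:"query"`
	Slop  int    `json:"slop,omitempty"`
}

func (q *phraseQuery) UnmarshalJSON(data []byte) error {
	if !bytes.HasPrefix(bytes.TrimSpace(data), []byte("{")) {
		return json.Unmarshal(data, &q.Query)
	}
	type plain phraseQuery // avoid recursing into this UnmarshalJSON
	return json.Unmarshal(data, (*plain)(q))
}

func main() {
	for _, in := range []string{`"quick fox"`, `{"query":"quick fox","slop":2}`} {
		var q phraseQuery
		if err := json.Unmarshal([]byte(in), &q); err != nil {
			panic(err)
		}
		fmt.Printf("%s -> %+v\n", in, q)
	}
}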
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L173-L180 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L173-L180 type MatchPhraseQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -36,6 +44,92 @@ type MatchPhraseQuery struct { ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` } +func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Query) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "slop": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Slop = &value + case float64: + f := int(v) + s.Slop = &f + } + + case "zero_terms_query": + if err := dec.Decode(&s.ZeroTermsQuery); err != nil { + return err + } + + } + } + return nil +} + // NewMatchPhraseQuery returns a MatchPhraseQuery. func NewMatchPhraseQuery() *MatchPhraseQuery { r := &MatchPhraseQuery{} diff --git a/typedapi/types/matchquery.go b/typedapi/types/matchquery.go old mode 100755 new mode 100644 index 6d32a4a1b5..928bb641c6 --- a/typedapi/types/matchquery.go +++ b/typedapi/types/matchquery.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/operator" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/zerotermsquery" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MatchQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L133-L158 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L133-L158 type MatchQuery struct { Analyzer *string `json:"analyzer,omitempty"` AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` @@ -46,6 +54,186 @@ type MatchQuery struct { ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` } +func (s *MatchQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Query) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "auto_generate_synonyms_phrase_query": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AutoGenerateSynonymsPhraseQuery = &value + case bool: + s.AutoGenerateSynonymsPhraseQuery = &v + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "cutoff_frequency": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.CutoffFrequency = &f + case float64: + f := Float64(v) + s.CutoffFrequency = &f + } + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "fuzzy_rewrite": + if err := dec.Decode(&s.FuzzyRewrite); err != nil { + return err + } + + case "fuzzy_transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FuzzyTranspositions = &value + case bool: + s.FuzzyTranspositions = &v + } + + case "lenient": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lenient = &value + case bool: + s.Lenient = &v + } + + case "max_expansions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxExpansions = &value + case float64: + f := int(v) + s.MaxExpansions = &f + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "operator": + if err := dec.Decode(&s.Operator); err != nil { + return err + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var 
tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "zero_terms_query": + if err := dec.Decode(&s.ZeroTermsQuery); err != nil { + return err + } + + } + } + return nil +} + // NewMatchQuery returns a MatchQuery. func NewMatchQuery() *MatchQuery { r := &MatchQuery{} diff --git a/typedapi/types/matrixaggregation.go b/typedapi/types/matrixaggregation.go old mode 100755 new mode 100644 index 7ac8b260af..5bb6d09671 --- a/typedapi/types/matrixaggregation.go +++ b/typedapi/types/matrixaggregation.go @@ -16,22 +16,83 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // MatrixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/matrix.ts#L26-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/matrix.ts#L26-L29 type MatrixAggregation struct { - Fields []string `json:"fields,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Missing map[string]Float64 `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` + Fields []string `json:"fields,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing map[string]Float64 `json:"missing,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *MatrixAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "missing": + if s.Missing == nil { + s.Missing = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil } // NewMatrixAggregation returns a MatrixAggregation. diff --git a/typedapi/types/matrixstatsaggregate.go b/typedapi/types/matrixstatsaggregate.go old mode 100755 new mode 100644 index 4c41394837..8b78311c4e --- a/typedapi/types/matrixstatsaggregate.go +++ b/typedapi/types/matrixstatsaggregate.go @@ -16,21 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // MatrixStatsAggregate type. 
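// Illustrative usage sketch (hand-written, not generated code): the
// MatchQuery.UnmarshalJSON added above accepts both the shorthand string form
// and the expanded object form of a match query. The payloads below are made
// up; only behaviour visible in the decoder above is assumed.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Shorthand form, e.g. {"match": {"message": "ciao"}}: the bare string is
	// routed into Query by the bytes.HasPrefix check above.
	var short types.MatchQuery
	if err := json.Unmarshal([]byte(`"ciao"`), &short); err != nil {
		panic(err)
	}
	fmt.Println(short.Query) // ciao

	// Object form: numeric and boolean options tolerate both native JSON values
	// and their quoted spellings, per the strconv branches above.
	var full types.MatchQuery
	if err := json.Unmarshal([]byte(`{"query":"ciao","boost":"2.5","lenient":true}`), &full); err != nil {
		panic(err)
	}
	fmt.Println(*full.Boost, *full.Lenient) // 2.5 true
}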
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L748-L752 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L757-L761 type MatrixStatsAggregate struct { - DocCount int64 `json:"doc_count"` - Fields []MatrixStatsFields `json:"fields,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + DocCount int64 `json:"doc_count"` + Fields []MatrixStatsFields `json:"fields,omitempty"` + Meta Metadata `json:"meta,omitempty"` +} + +func (s *MatrixStatsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f + } + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + } + } + return nil } // NewMatrixStatsAggregate returns a MatrixStatsAggregate. diff --git a/typedapi/types/matrixstatsaggregation.go b/typedapi/types/matrixstatsaggregation.go old mode 100755 new mode 100644 index a2a3289c23..4ca0921443 --- a/typedapi/types/matrixstatsaggregation.go +++ b/typedapi/types/matrixstatsaggregation.go @@ -16,25 +16,91 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortmode" + + "bytes" + "errors" + "io" + + "encoding/json" ) // MatrixStatsAggregation type. 
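// Illustrative sketch (hand-written, not generated): the MatrixStatsAggregate
// decoder above normalizes doc_count whether Elasticsearch sends it as a JSON
// number or as a quoted string. The payloads are invented for the example.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	for _, payload := range []string{
		`{"doc_count": 42}`,
		`{"doc_count": "42"}`, // stringified number, handled by the strconv branch
	} {
		var agg types.MatrixStatsAggregate
		if err := json.Unmarshal([]byte(payload), &agg); err != nil {
			panic(err)
		}
		fmt.Println(agg.DocCount) // 42 both times
	}
}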
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/matrix.ts#L31-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/matrix.ts#L31-L33 type MatrixStatsAggregation struct { - Fields []string `json:"fields,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Missing map[string]Float64 `json:"missing,omitempty"` - Mode *sortmode.SortMode `json:"mode,omitempty"` - Name *string `json:"name,omitempty"` + Fields []string `json:"fields,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing map[string]Float64 `json:"missing,omitempty"` + Mode *sortmode.SortMode `json:"mode,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *MatrixStatsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "missing": + if s.Missing == nil { + s.Missing = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil } // NewMatrixStatsAggregation returns a MatrixStatsAggregation. diff --git a/typedapi/types/matrixstatsfields.go b/typedapi/types/matrixstatsfields.go old mode 100755 new mode 100644 index fcca5e0a8d..e58bcb2e3e --- a/typedapi/types/matrixstatsfields.go +++ b/typedapi/types/matrixstatsfields.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MatrixStatsFields type. 
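// Illustrative sketch (hand-written, not generated): the "fields" branch in the
// MatrixStatsAggregation decoder above inspects the raw JSON, so a single
// string and an array of strings both end up in the Fields slice. Field names
// are invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var single, many types.MatrixStatsAggregation
	if err := json.Unmarshal([]byte(`{"fields": "turnover"}`), &single); err != nil {
		panic(err)
	}
	if err := json.Unmarshal([]byte(`{"fields": ["turnover", "latency"]}`), &many); err != nil {
		panic(err)
	}
	fmt.Println(single.Fields) // [turnover]
	fmt.Println(many.Fields)   // [turnover latency]
}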
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L754-L763 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L763-L772 type MatrixStatsFields struct { Correlation map[string]Float64 `json:"correlation"` Count int64 `json:"count"` @@ -34,6 +44,126 @@ type MatrixStatsFields struct { Variance Float64 `json:"variance"` } +func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "correlation": + if s.Correlation == nil { + s.Correlation = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.Correlation); err != nil { + return err + } + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "covariance": + if s.Covariance == nil { + s.Covariance = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.Covariance); err != nil { + return err + } + + case "kurtosis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Kurtosis = f + case float64: + f := Float64(v) + s.Kurtosis = f + } + + case "mean": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Mean = f + case float64: + f := Float64(v) + s.Mean = f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "skewness": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Skewness = f + case float64: + f := Float64(v) + s.Skewness = f + } + + case "variance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Variance = f + case float64: + f := Float64(v) + s.Variance = f + } + + } + } + return nil +} + // NewMatrixStatsFields returns a MatrixStatsFields. func NewMatrixStatsFields() *MatrixStatsFields { r := &MatrixStatsFields{ diff --git a/typedapi/types/maxaggregate.go b/typedapi/types/maxaggregate.go old mode 100755 new mode 100644 index dd3ae596e0..fc8a367bb5 --- a/typedapi/types/maxaggregate.go +++ b/typedapi/types/maxaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // MaxAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L199-L200 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L200-L201 type MaxAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -36,6 +40,44 @@ type MaxAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *MaxAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewMaxAggregate returns a MaxAggregate. func NewMaxAggregate() *MaxAggregate { r := &MaxAggregate{} diff --git a/typedapi/types/maxaggregation.go b/typedapi/types/maxaggregation.go old mode 100755 new mode 100644 index 6decb90cfb..d33830e3e9 --- a/typedapi/types/maxaggregation.go +++ b/typedapi/types/maxaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MaxAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L97-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L97-L97 type MaxAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type MaxAggregation struct { Script Script `json:"script,omitempty"` } +func (s *MaxAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewMaxAggregation returns a MaxAggregation. 
func NewMaxAggregation() *MaxAggregation { r := &MaxAggregation{} diff --git a/typedapi/types/maxbucketaggregation.go b/typedapi/types/maxbucketaggregation.go old mode 100755 new mode 100644 index 422611e283..c441e9862f --- a/typedapi/types/maxbucketaggregation.go +++ b/typedapi/types/maxbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // MaxBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L184-L184 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L184-L184 type MaxBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/medianabsolutedeviationaggregate.go b/typedapi/types/medianabsolutedeviationaggregate.go old mode 100755 new mode 100644 index a07f27fbbe..a57b1456bc --- a/typedapi/types/medianabsolutedeviationaggregate.go +++ b/typedapi/types/medianabsolutedeviationaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // MedianAbsoluteDeviationAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L193-L194 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L194-L195 type MedianAbsoluteDeviationAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -36,6 +40,44 @@ type MedianAbsoluteDeviationAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *MedianAbsoluteDeviationAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewMedianAbsoluteDeviationAggregate returns a MedianAbsoluteDeviationAggregate. func NewMedianAbsoluteDeviationAggregate() *MedianAbsoluteDeviationAggregate { r := &MedianAbsoluteDeviationAggregate{} diff --git a/typedapi/types/medianabsolutedeviationaggregation.go b/typedapi/types/medianabsolutedeviationaggregation.go old mode 100755 new mode 100644 index 49ca74346d..c7820d995b --- a/typedapi/types/medianabsolutedeviationaggregation.go +++ b/typedapi/types/medianabsolutedeviationaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MedianAbsoluteDeviationAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L99-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L99-L101 type MedianAbsoluteDeviationAggregation struct { Compression *Float64 `json:"compression,omitempty"` Field *string `json:"field,omitempty"` @@ -31,6 +41,65 @@ type MedianAbsoluteDeviationAggregation struct { Script Script `json:"script,omitempty"` } +func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compression": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Compression = &f + case float64: + f := Float64(v) + s.Compression = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewMedianAbsoluteDeviationAggregation returns a MedianAbsoluteDeviationAggregation. func NewMedianAbsoluteDeviationAggregation() *MedianAbsoluteDeviationAggregation { r := &MedianAbsoluteDeviationAggregation{} diff --git a/typedapi/types/memmlstats.go b/typedapi/types/memmlstats.go old mode 100755 new mode 100644 index 6597a01928..ae2b2de3ed --- a/typedapi/types/memmlstats.go +++ b/typedapi/types/memmlstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MemMlStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_memory_stats/types.ts#L90-L111 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_memory_stats/types.ts#L90-L111 type MemMlStats struct { // AnomalyDetectors Amount of native memory set aside for anomaly detection jobs. 
AnomalyDetectors ByteSize `json:"anomaly_detectors,omitempty"` @@ -52,6 +62,131 @@ type MemMlStats struct { NativeInferenceInBytes int `json:"native_inference_in_bytes"` } +func (s *MemMlStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "anomaly_detectors": + if err := dec.Decode(&s.AnomalyDetectors); err != nil { + return err + } + + case "anomaly_detectors_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AnomalyDetectorsInBytes = value + case float64: + f := int(v) + s.AnomalyDetectorsInBytes = f + } + + case "data_frame_analytics": + if err := dec.Decode(&s.DataFrameAnalytics); err != nil { + return err + } + + case "data_frame_analytics_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DataFrameAnalyticsInBytes = value + case float64: + f := int(v) + s.DataFrameAnalyticsInBytes = f + } + + case "max": + if err := dec.Decode(&s.Max); err != nil { + return err + } + + case "max_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxInBytes = value + case float64: + f := int(v) + s.MaxInBytes = f + } + + case "native_code_overhead": + if err := dec.Decode(&s.NativeCodeOverhead); err != nil { + return err + } + + case "native_code_overhead_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NativeCodeOverheadInBytes = value + case float64: + f := int(v) + s.NativeCodeOverheadInBytes = f + } + + case "native_inference": + if err := dec.Decode(&s.NativeInference); err != nil { + return err + } + + case "native_inference_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NativeInferenceInBytes = value + case float64: + f := int(v) + s.NativeInferenceInBytes = f + } + + } + } + return nil +} + // NewMemMlStats returns a MemMlStats. func NewMemMlStats() *MemMlStats { r := &MemMlStats{} diff --git a/typedapi/types/memory.go b/typedapi/types/memory.go old mode 100755 new mode 100644 index ad054a6830..3e70ad3494 --- a/typedapi/types/memory.go +++ b/typedapi/types/memory.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Memory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_memory_stats/types.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_memory_stats/types.ts#L25-L48 type Memory struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` @@ -39,6 +47,64 @@ type Memory struct { TransportAddress string `json:"transport_address"` } +func (s *Memory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "ephemeral_id": + if err := dec.Decode(&s.EphemeralId); err != nil { + return err + } + + case "jvm": + if err := dec.Decode(&s.Jvm); err != nil { + return err + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewMemory returns a Memory. func NewMemory() *Memory { r := &Memory{ diff --git a/typedapi/types/memorystats.go b/typedapi/types/memorystats.go old mode 100755 new mode 100644 index 0f5d433b00..6f64e5996c --- a/typedapi/types/memorystats.go +++ b/typedapi/types/memorystats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MemoryStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L248-L259 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L248-L259 type MemoryStats struct { AdjustedTotalInBytes *int64 `json:"adjusted_total_in_bytes,omitempty"` FreeInBytes *int64 `json:"free_in_bytes,omitempty"` @@ -36,6 +46,155 @@ type MemoryStats struct { UsedInBytes *int64 `json:"used_in_bytes,omitempty"` } +func (s *MemoryStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "adjusted_total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AdjustedTotalInBytes = &value + case float64: + f := int64(v) + s.AdjustedTotalInBytes = &f + } + + case "free_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeInBytes = &value + case float64: + f := int64(v) + s.FreeInBytes = &f + } + + case "resident": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Resident = &o + + case "resident_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ResidentInBytes = &value + case float64: + f := int64(v) + s.ResidentInBytes = &f + } + + case "share": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Share = &o + + case "share_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShareInBytes = &value + case float64: + f := int64(v) + s.ShareInBytes = &f + } + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = &value + case float64: + f := int64(v) + s.TotalInBytes = &f + } + + case "total_virtual": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TotalVirtual = &o + + case "total_virtual_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalVirtualInBytes = &value + case float64: + f := int64(v) + s.TotalVirtualInBytes = &f + } + + case "used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UsedInBytes = &value + case float64: + f := int64(v) + s.UsedInBytes = &f + } + + } + } + return nil +} + // NewMemoryStats returns a MemoryStats. func NewMemoryStats() *MemoryStats { r := &MemoryStats{} diff --git a/typedapi/types/memstats.go b/typedapi/types/memstats.go old mode 100755 new mode 100644 index 66f913d8f1..f83a9b5f8f --- a/typedapi/types/memstats.go +++ b/typedapi/types/memstats.go @@ -16,13 +16,23 @@ // under the License. 
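// Illustrative sketch (hand-written, not generated): MemoryStats keeps its byte
// counters as optional int64 pointers, and the decoder above accepts each of
// them as either a JSON number or a quoted string. Values below are invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := `{"total_in_bytes": 8589934592, "free_in_bytes": "1073741824"}`
	var mem types.MemoryStats
	if err := json.Unmarshal([]byte(payload), &mem); err != nil {
		panic(err)
	}
	// Both fields are populated even though one arrived stringified.
	fmt.Println(*mem.TotalInBytes, *mem.FreeInBytes) // 8589934592 1073741824
}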
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MemStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/get_memory_stats/types.ts#L65-L88 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/get_memory_stats/types.ts#L65-L88 type MemStats struct { // AdjustedTotal If the amount of physical memory has been overridden using the // es.total_memory_bytes system property @@ -42,6 +52,73 @@ type MemStats struct { TotalInBytes int `json:"total_in_bytes"` } +func (s *MemStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "adjusted_total": + if err := dec.Decode(&s.AdjustedTotal); err != nil { + return err + } + + case "adjusted_total_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AdjustedTotalInBytes = value + case float64: + f := int(v) + s.AdjustedTotalInBytes = f + } + + case "ml": + if err := dec.Decode(&s.Ml); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + case "total_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalInBytes = value + case float64: + f := int(v) + s.TotalInBytes = f + } + + } + } + return nil +} + // NewMemStats returns a MemStats. func NewMemStats() *MemStats { r := &MemStats{} diff --git a/typedapi/types/merge.go b/typedapi/types/merge.go old mode 100755 new mode 100644 index d864e01b5d..63008beb10 --- a/typedapi/types/merge.go +++ b/typedapi/types/merge.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Merge type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L323-L325 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L323-L325 type Merge struct { Scheduler *MergeScheduler `json:"scheduler,omitempty"` } diff --git a/typedapi/types/mergescheduler.go b/typedapi/types/mergescheduler.go old mode 100755 new mode 100644 index ff8eff6d80..2e10c968d6 --- a/typedapi/types/mergescheduler.go +++ b/typedapi/types/mergescheduler.go @@ -16,16 +16,54 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MergeScheduler type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L327-L330 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L327-L330 type MergeScheduler struct { - MaxMergeCount *int `json:"max_merge_count,omitempty"` - MaxThreadCount *int `json:"max_thread_count,omitempty"` + MaxMergeCount Stringifiedinteger `json:"max_merge_count,omitempty"` + MaxThreadCount Stringifiedinteger `json:"max_thread_count,omitempty"` +} + +func (s *MergeScheduler) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_merge_count": + if err := dec.Decode(&s.MaxMergeCount); err != nil { + return err + } + + case "max_thread_count": + if err := dec.Decode(&s.MaxThreadCount); err != nil { + return err + } + + } + } + return nil } // NewMergeScheduler returns a MergeScheduler. diff --git a/typedapi/types/mergesstats.go b/typedapi/types/mergesstats.go old mode 100755 new mode 100644 index 6a74fc1444..3c794a3545 --- a/typedapi/types/mergesstats.go +++ b/typedapi/types/mergesstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MergesStats type. 
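// Illustrative sketch (hand-written, not generated): max_merge_count and
// max_thread_count are now typed as Stringifiedinteger rather than *int. The
// apparent intent is to tolerate merge-scheduler settings that Elasticsearch
// reports as quoted numbers; whether both spellings decode depends on
// Stringifiedinteger, which this sketch assumes accepts either form.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	for _, payload := range []string{
		`{"max_thread_count": 4}`,
		`{"max_thread_count": "4"}`, // settings APIs often return stringified numbers
	} {
		var ms types.MergeScheduler
		if err := json.Unmarshal([]byte(payload), &ms); err != nil {
			fmt.Println("decode error:", err)
			continue
		}
		fmt.Printf("%v\n", ms.MaxThreadCount)
	}
}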
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L119-L136 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L119-L136 type MergesStats struct { Current int64 `json:"current"` CurrentDocs int64 `json:"current_docs"` @@ -42,6 +52,185 @@ type MergesStats struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *MergesStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Current = value + case float64: + f := int64(v) + s.Current = f + } + + case "current_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CurrentDocs = value + case float64: + f := int64(v) + s.CurrentDocs = f + } + + case "current_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CurrentSize = &o + + case "current_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CurrentSizeInBytes = value + case float64: + f := int64(v) + s.CurrentSizeInBytes = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "total_auto_throttle": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TotalAutoThrottle = &o + + case "total_auto_throttle_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalAutoThrottleInBytes = value + case float64: + f := int64(v) + s.TotalAutoThrottleInBytes = f + } + + case "total_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalDocs = value + case float64: + f := int64(v) + s.TotalDocs = f + } + + case "total_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TotalSize = &o + + case "total_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSizeInBytes = value + case float64: + f := int64(v) + s.TotalSizeInBytes = f + } + + case "total_stopped_time": + if err := dec.Decode(&s.TotalStoppedTime); err != nil { + return err + } + + case "total_stopped_time_in_millis": + if err := dec.Decode(&s.TotalStoppedTimeInMillis); err != nil { + return err + } + + case "total_throttled_time": + if err := dec.Decode(&s.TotalThrottledTime); err != nil { + return err + } + + case "total_throttled_time_in_millis": + if err := dec.Decode(&s.TotalThrottledTimeInMillis); err != nil { + 
return err + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewMergesStats returns a MergesStats. func NewMergesStats() *MergesStats { r := &MergesStats{} diff --git a/typedapi/types/metadata.go b/typedapi/types/metadata.go old mode 100755 new mode 100644 index 54b9c24531..ec268eaeca --- a/typedapi/types/metadata.go +++ b/typedapi/types/metadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,5 +24,5 @@ import "encoding/json" // Metadata type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L91-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L91-L91 type Metadata map[string]json.RawMessage diff --git a/typedapi/types/metrics.go b/typedapi/types/metrics.go old mode 100755 new mode 100644 index d4ad5362b7..86ee17af41 --- a/typedapi/types/metrics.go +++ b/typedapi/types/metrics.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Metrics type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L70-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L70-L70 type Metrics []string diff --git a/typedapi/types/mgetoperation.go b/typedapi/types/mgetoperation.go old mode 100755 new mode 100644 index 7d0b32e177..ab742dac9c --- a/typedapi/types/mgetoperation.go +++ b/typedapi/types/mgetoperation.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // MgetOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mget/types.ts#L32-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mget/types.ts#L32-L55 type MgetOperation struct { // Id_ The unique document ID. 
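// Illustrative sketch (hand-written, not generated): the Meta fields throughout
// this diff now use the Metadata alias, which is just map[string]json.RawMessage,
// so arbitrary user metadata round-trips without a fixed schema. Keys and values
// below are invented.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	meta := types.Metadata{
		"owner": json.RawMessage(`"analytics-team"`),
		"ttl":   json.RawMessage(`3600`),
	}
	out, err := json.Marshal(meta)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"owner":"analytics-team","ttl":3600}

	// Reading it back from an aggregate response, e.g. MaxAggregate.Meta, yields
	// the raw JSON value stored under each key.
	var agg types.MaxAggregate
	if err := json.Unmarshal([]byte(`{"value": 1, "meta": {"owner": "analytics-team"}}`), &agg); err != nil {
		panic(err)
	}
	fmt.Println(string(agg.Meta["owner"])) // "analytics-team"
}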
Id_ string `json:"_id"` @@ -43,6 +49,72 @@ type MgetOperation struct { VersionType *versiontype.VersionType `json:"version_type,omitempty"` } +func (s *MgetOperation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err + } + + case "stored_fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.StoredFields = append(s.StoredFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { + return err + } + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "version_type": + if err := dec.Decode(&s.VersionType); err != nil { + return err + } + + } + } + return nil +} + // NewMgetOperation returns a MgetOperation. func NewMgetOperation() *MgetOperation { r := &MgetOperation{} diff --git a/typedapi/types/migrationfeatureindexinfo.go b/typedapi/types/migrationfeatureindexinfo.go old mode 100755 new mode 100644 index f8537decd2..33c0083ff1 --- a/typedapi/types/migrationfeatureindexinfo.go +++ b/typedapi/types/migrationfeatureindexinfo.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MigrationFeatureIndexInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L44-L48 type MigrationFeatureIndexInfo struct { FailureCause *ErrorCause `json:"failure_cause,omitempty"` Index string `json:"index"` Version string `json:"version"` } +func (s *MigrationFeatureIndexInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "failure_cause": + if err := dec.Decode(&s.FailureCause); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewMigrationFeatureIndexInfo returns a MigrationFeatureIndexInfo. 
func NewMigrationFeatureIndexInfo() *MigrationFeatureIndexInfo { r := &MigrationFeatureIndexInfo{} diff --git a/typedapi/types/minaggregate.go b/typedapi/types/minaggregate.go old mode 100755 new mode 100644 index 73cf20e0fa..0fe9c2cde3 --- a/typedapi/types/minaggregate.go +++ b/typedapi/types/minaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // MinAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L196-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L197-L198 type MinAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -36,6 +40,44 @@ type MinAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *MinAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewMinAggregate returns a MinAggregate. func NewMinAggregate() *MinAggregate { r := &MinAggregate{} diff --git a/typedapi/types/minaggregation.go b/typedapi/types/minaggregation.go old mode 100755 new mode 100644 index 19af716997..108145a716 --- a/typedapi/types/minaggregation.go +++ b/typedapi/types/minaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MinAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L103-L103 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L103-L103 type MinAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type MinAggregation struct { Script Script `json:"script,omitempty"` } +func (s *MinAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewMinAggregation returns a MinAggregation. func NewMinAggregation() *MinAggregation { r := &MinAggregation{} diff --git a/typedapi/types/minbucketaggregation.go b/typedapi/types/minbucketaggregation.go old mode 100755 new mode 100644 index f9eb1ca593..61dd7f4e6d --- a/typedapi/types/minbucketaggregation.go +++ b/typedapi/types/minbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // MinBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L186-L186 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L186-L186 type MinBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/minimallicenseinformation.go b/typedapi/types/minimallicenseinformation.go old mode 100755 new mode 100644 index 717db6bb36..8fa7b6a87c --- a/typedapi/types/minimallicenseinformation.go +++ b/typedapi/types/minimallicenseinformation.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/licensestatus" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/licensetype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // MinimalLicenseInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/info/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/info/types.ts#L34-L40 type MinimalLicenseInformation struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` Mode licensetype.LicenseType `json:"mode"` @@ -36,6 +42,54 @@ type MinimalLicenseInformation struct { Uid string `json:"uid"` } +func (s *MinimalLicenseInformation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expiry_date_in_millis": + if err := dec.Decode(&s.ExpiryDateInMillis); err != nil { + return err + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "uid": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Uid = o + + } + } + return nil +} + // NewMinimalLicenseInformation returns a MinimalLicenseInformation. 
func NewMinimalLicenseInformation() *MinimalLicenseInformation { r := &MinimalLicenseInformation{} diff --git a/typedapi/types/minimumshouldmatch.go b/typedapi/types/minimumshouldmatch.go old mode 100755 new mode 100644 index 7b93aae9f0..a0fbfd7137 --- a/typedapi/types/minimumshouldmatch.go +++ b/typedapi/types/minimumshouldmatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L143-L147 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L143-L147 type MinimumShouldMatch interface{} diff --git a/typedapi/types/missing.go b/typedapi/types/missing.go old mode 100755 new mode 100644 index 9d1bfd901c..0d9755c293 --- a/typedapi/types/missing.go +++ b/typedapi/types/missing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,5 +27,5 @@ package types // Float64 // bool // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/AggregationContainer.ts#L211-L211 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/AggregationContainer.ts#L213-L213 type Missing interface{} diff --git a/typedapi/types/missingaggregate.go b/typedapi/types/missingaggregate.go old mode 100755 new mode 100644 index 37a00d91ad..73ac946c0f --- a/typedapi/types/missingaggregate.go +++ b/typedapi/types/missingaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // MissingAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L482-L483 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L483-L484 type MissingAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *MissingAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err 
:= dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := 
NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s MissingAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/missingaggregation.go b/typedapi/types/missingaggregation.go old mode 100755 new mode 100644 index d92f43c171..cb8fb168b2 --- a/typedapi/types/missingaggregation.go +++ b/typedapi/types/missingaggregation.go @@ -16,22 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // MissingAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L260-L263 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L260-L263 type MissingAggregation struct { - Field *string `json:"field,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Missing Missing `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` + Field *string `json:"field,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing Missing `json:"missing,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *MissingAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil } // NewMissingAggregation returns a MissingAggregation. 
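Note on the regenerated decoders above: the hand-rolled UnmarshalJSON methods added in this diff make the typed API lenient about numeric fields (a value may arrive as a JSON number or as a string), replace map[string]json.RawMessage metadata with the shared Metadata type, and resolve sub-aggregations keyed with the Elasticsearch typed_keys convention ("<type>#<name>") into their concrete aggregate types. Below is a minimal sketch of how a caller could exercise the MissingAggregate decoder, assuming go-elasticsearch v8 with the typedapi/types package from this diff on the module path; the expected values in the trailing comments are assumptions based on the generated code shown here, not verified output.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// doc_count arrives as a quoted string and the sub-aggregation is keyed
	// with the typed_keys convention ("min#min_price").
	raw := []byte(`{
		"doc_count": "42",
		"min#min_price": { "value": 9.99 }
	}`)

	var agg types.MissingAggregate
	if err := json.Unmarshal(raw, &agg); err != nil {
		panic(err)
	}

	fmt.Println(agg.DocCount)                          // 42, parsed via strconv.ParseInt from the string form
	fmt.Printf("%T\n", agg.Aggregations["min_price"])  // expected: *types.MinAggregate, picked by the "min#" prefix
}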
diff --git a/typedapi/types/mlcounter.go b/typedapi/types/mlcounter.go old mode 100755 new mode 100644 index f31c025f1f..285d9585a5 --- a/typedapi/types/mlcounter.go +++ b/typedapi/types/mlcounter.go @@ -16,17 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlCounter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L249-L251 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L249-L251 type MlCounter struct { Count int64 `json:"count"` } +func (s *MlCounter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + } + } + return nil +} + // NewMlCounter returns a MlCounter. func NewMlCounter() *MlCounter { r := &MlCounter{} diff --git a/typedapi/types/mldatafeed.go b/typedapi/types/mldatafeed.go old mode 100755 new mode 100644 index 3fb6a84c06..e93fcc07e1 --- a/typedapi/types/mldatafeed.go +++ b/typedapi/types/mldatafeed.go @@ -16,33 +16,171 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MLDatafeed type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L37-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L37-L58 type MLDatafeed struct { Aggregations map[string]Aggregations `json:"aggregations,omitempty"` // Authorization The security privileges that the datafeed uses to run its queries. If Elastic // Stack security features were disabled at the time of the most recent update // to the datafeed, this property is omitted. 
- Authorization *DatafeedAuthorization `json:"authorization,omitempty"` - ChunkingConfig *ChunkingConfig `json:"chunking_config,omitempty"` - DatafeedId string `json:"datafeed_id"` - DelayedDataCheckConfig DelayedDataCheckConfig `json:"delayed_data_check_config"` - Frequency Duration `json:"frequency,omitempty"` - Indexes []string `json:"indexes,omitempty"` - Indices []string `json:"indices"` - IndicesOptions *IndicesOptions `json:"indices_options,omitempty"` - JobId string `json:"job_id"` - MaxEmptySearches *int `json:"max_empty_searches,omitempty"` - Query Query `json:"query"` - QueryDelay Duration `json:"query_delay,omitempty"` - RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` - ScriptFields map[string]ScriptField `json:"script_fields,omitempty"` - ScrollSize *int `json:"scroll_size,omitempty"` + Authorization *DatafeedAuthorization `json:"authorization,omitempty"` + ChunkingConfig *ChunkingConfig `json:"chunking_config,omitempty"` + DatafeedId string `json:"datafeed_id"` + DelayedDataCheckConfig DelayedDataCheckConfig `json:"delayed_data_check_config"` + Frequency Duration `json:"frequency,omitempty"` + Indexes []string `json:"indexes,omitempty"` + Indices []string `json:"indices"` + IndicesOptions *IndicesOptions `json:"indices_options,omitempty"` + JobId string `json:"job_id"` + MaxEmptySearches *int `json:"max_empty_searches,omitempty"` + Query Query `json:"query"` + QueryDelay Duration `json:"query_delay,omitempty"` + RuntimeMappings RuntimeFields `json:"runtime_mappings,omitempty"` + ScriptFields map[string]ScriptField `json:"script_fields,omitempty"` + ScrollSize *int `json:"scroll_size,omitempty"` +} + +func (s *MLDatafeed) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations", "aggs": + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregations, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return err + } + + case "authorization": + if err := dec.Decode(&s.Authorization); err != nil { + return err + } + + case "chunking_config": + if err := dec.Decode(&s.ChunkingConfig); err != nil { + return err + } + + case "datafeed_id": + if err := dec.Decode(&s.DatafeedId); err != nil { + return err + } + + case "delayed_data_check_config": + if err := dec.Decode(&s.DelayedDataCheckConfig); err != nil { + return err + } + + case "frequency": + if err := dec.Decode(&s.Frequency); err != nil { + return err + } + + case "indexes": + if err := dec.Decode(&s.Indexes); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "indices_options": + if err := dec.Decode(&s.IndicesOptions); err != nil { + return err + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "max_empty_searches": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxEmptySearches = &value + case float64: + f := int(v) + s.MaxEmptySearches = &f + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "query_delay": + if err := dec.Decode(&s.QueryDelay); err != nil { + return err + } + + case "runtime_mappings": + if err := dec.Decode(&s.RuntimeMappings); err != nil { + return err + } + + case "script_fields": + if s.ScriptFields == nil { 
+ s.ScriptFields = make(map[string]ScriptField, 0) + } + if err := dec.Decode(&s.ScriptFields); err != nil { + return err + } + + case "scroll_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ScrollSize = &value + case float64: + f := int(v) + s.ScrollSize = &f + } + + } + } + return nil } // NewMLDatafeed returns a MLDatafeed. diff --git a/typedapi/types/mldataframeanalyticsjobs.go b/typedapi/types/mldataframeanalyticsjobs.go old mode 100755 new mode 100644 index 1dfd23a116..3131a40f5b --- a/typedapi/types/mldataframeanalyticsjobs.go +++ b/typedapi/types/mldataframeanalyticsjobs.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MlDataFrameAnalyticsJobs type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L177-L182 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L177-L182 type MlDataFrameAnalyticsJobs struct { All_ MlDataFrameAnalyticsJobsCount `json:"_all"` AnalysisCounts *MlDataFrameAnalyticsJobsAnalysis `json:"analysis_counts,omitempty"` diff --git a/typedapi/types/mldataframeanalyticsjobsanalysis.go b/typedapi/types/mldataframeanalyticsjobsanalysis.go old mode 100755 new mode 100644 index a3aaa10a35..5b19e3bb31 --- a/typedapi/types/mldataframeanalyticsjobsanalysis.go +++ b/typedapi/types/mldataframeanalyticsjobsanalysis.go @@ -16,19 +16,97 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlDataFrameAnalyticsJobsAnalysis type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L184-L188 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L184-L188 type MlDataFrameAnalyticsJobsAnalysis struct { Classification *int `json:"classification,omitempty"` OutlierDetection *int `json:"outlier_detection,omitempty"` Regression *int `json:"regression,omitempty"` } +func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classification": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Classification = &value + case float64: + f := int(v) + s.Classification = &f + } + + case "outlier_detection": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OutlierDetection = &value + case float64: + f := int(v) + s.OutlierDetection = &f + } + + case "regression": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Regression = &value + case float64: + f := int(v) + s.Regression = &f + } + + } + } + return nil +} + // NewMlDataFrameAnalyticsJobsAnalysis returns a MlDataFrameAnalyticsJobsAnalysis. func NewMlDataFrameAnalyticsJobsAnalysis() *MlDataFrameAnalyticsJobsAnalysis { r := &MlDataFrameAnalyticsJobsAnalysis{} diff --git a/typedapi/types/mldataframeanalyticsjobscount.go b/typedapi/types/mldataframeanalyticsjobscount.go old mode 100755 new mode 100644 index fed2d15290..9e7e0d66c2 --- a/typedapi/types/mldataframeanalyticsjobscount.go +++ b/typedapi/types/mldataframeanalyticsjobscount.go @@ -16,17 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlDataFrameAnalyticsJobsCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L194-L196 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L194-L196 type MlDataFrameAnalyticsJobsCount struct { Count int64 `json:"count"` } +func (s *MlDataFrameAnalyticsJobsCount) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + } + } + return nil +} + // NewMlDataFrameAnalyticsJobsCount returns a MlDataFrameAnalyticsJobsCount. 
func NewMlDataFrameAnalyticsJobsCount() *MlDataFrameAnalyticsJobsCount { r := &MlDataFrameAnalyticsJobsCount{} diff --git a/typedapi/types/mldataframeanalyticsjobsmemory.go b/typedapi/types/mldataframeanalyticsjobsmemory.go old mode 100755 new mode 100644 index 1b516d4ec5..28270e1e88 --- a/typedapi/types/mldataframeanalyticsjobsmemory.go +++ b/typedapi/types/mldataframeanalyticsjobsmemory.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MlDataFrameAnalyticsJobsMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L190-L192 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L190-L192 type MlDataFrameAnalyticsJobsMemory struct { PeakUsageBytes JobStatistics `json:"peak_usage_bytes"` } diff --git a/typedapi/types/mlfilter.go b/typedapi/types/mlfilter.go old mode 100755 new mode 100644 index 53305e0cdc..c76f67443a --- a/typedapi/types/mlfilter.go +++ b/typedapi/types/mlfilter.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MLFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Filter.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Filter.ts#L22-L29 type MLFilter struct { // Description A description of the filter. Description *string `json:"description,omitempty"` @@ -32,6 +40,44 @@ type MLFilter struct { Items []string `json:"items"` } +func (s *MLFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "filter_id": + if err := dec.Decode(&s.FilterId); err != nil { + return err + } + + case "items": + if err := dec.Decode(&s.Items); err != nil { + return err + } + + } + } + return nil +} + // NewMLFilter returns a MLFilter. func NewMLFilter() *MLFilter { r := &MLFilter{} diff --git a/typedapi/types/mlinference.go b/typedapi/types/mlinference.go old mode 100755 new mode 100644 index 082ec9e48d..2d8184d663 --- a/typedapi/types/mlinference.go +++ b/typedapi/types/mlinference.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MlInference type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L198-L203 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L198-L203 type MlInference struct { Deployments *MlInferenceDeployments `json:"deployments,omitempty"` IngestProcessors map[string]MlInferenceIngestProcessor `json:"ingest_processors"` diff --git a/typedapi/types/mlinferencedeployments.go b/typedapi/types/mlinferencedeployments.go old mode 100755 new mode 100644 index bf5aa6ad92..5ce1d52b84 --- a/typedapi/types/mlinferencedeployments.go +++ b/typedapi/types/mlinferencedeployments.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlInferenceDeployments type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L221-L226 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L221-L226 type MlInferenceDeployments struct { Count int `json:"count"` InferenceCounts JobStatistics `json:"inference_counts"` @@ -30,6 +40,57 @@ type MlInferenceDeployments struct { TimeMs MlInferenceDeploymentsTimeMs `json:"time_ms"` } +func (s *MlInferenceDeployments) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "inference_counts": + if err := dec.Decode(&s.InferenceCounts); err != nil { + return err + } + + case "model_sizes_bytes": + if err := dec.Decode(&s.ModelSizesBytes); err != nil { + return err + } + + case "time_ms": + if err := dec.Decode(&s.TimeMs); err != nil { + return err + } + + } + } + return nil +} + // NewMlInferenceDeployments returns a MlInferenceDeployments. func NewMlInferenceDeployments() *MlInferenceDeployments { r := &MlInferenceDeployments{} diff --git a/typedapi/types/mlinferencedeploymentstimems.go b/typedapi/types/mlinferencedeploymentstimems.go old mode 100755 new mode 100644 index fb18331255..28da101ea9 --- a/typedapi/types/mlinferencedeploymentstimems.go +++ b/typedapi/types/mlinferencedeploymentstimems.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlInferenceDeploymentsTimeMs type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L228-L230 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L228-L230 type MlInferenceDeploymentsTimeMs struct { Avg Float64 `json:"avg"` } +func (s *MlInferenceDeploymentsTimeMs) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Avg = f + case float64: + f := Float64(v) + s.Avg = f + } + + } + } + return nil +} + // NewMlInferenceDeploymentsTimeMs returns a MlInferenceDeploymentsTimeMs. func NewMlInferenceDeploymentsTimeMs() *MlInferenceDeploymentsTimeMs { r := &MlInferenceDeploymentsTimeMs{} diff --git a/typedapi/types/mlinferenceingestprocessor.go b/typedapi/types/mlinferenceingestprocessor.go old mode 100755 new mode 100644 index f97f068674..a8a6cc1cea --- a/typedapi/types/mlinferenceingestprocessor.go +++ b/typedapi/types/mlinferenceingestprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MlInferenceIngestProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L205-L210 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L205-L210 type MlInferenceIngestProcessor struct { NumDocsProcessed MlInferenceIngestProcessorCount `json:"num_docs_processed"` NumFailures MlInferenceIngestProcessorCount `json:"num_failures"` diff --git a/typedapi/types/mlinferenceingestprocessorcount.go b/typedapi/types/mlinferenceingestprocessorcount.go old mode 100755 new mode 100644 index 516848289a..5b993d6171 --- a/typedapi/types/mlinferenceingestprocessorcount.go +++ b/typedapi/types/mlinferenceingestprocessorcount.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlInferenceIngestProcessorCount type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L232-L236 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L232-L236 type MlInferenceIngestProcessorCount struct { Max int64 `json:"max"` Min int64 `json:"min"` Sum int64 `json:"sum"` } +func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Max = value + case float64: + f := int64(v) + s.Max = f + } + + case "min": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Min = value + case float64: + f := int64(v) + s.Min = f + } + + case "sum": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Sum = value + case float64: + f := int64(v) + s.Sum = f + } + + } + } + return nil +} + // NewMlInferenceIngestProcessorCount returns a MlInferenceIngestProcessorCount. func NewMlInferenceIngestProcessorCount() *MlInferenceIngestProcessorCount { r := &MlInferenceIngestProcessorCount{} diff --git a/typedapi/types/mlinferencetrainedmodels.go b/typedapi/types/mlinferencetrainedmodels.go old mode 100755 new mode 100644 index 0a6c061d1c..d0f3363b90 --- a/typedapi/types/mlinferencetrainedmodels.go +++ b/typedapi/types/mlinferencetrainedmodels.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // MlInferenceTrainedModels type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L212-L219 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L212-L219 type MlInferenceTrainedModels struct { All_ MlCounter `json:"_all"` Count *MlInferenceTrainedModelsCount `json:"count,omitempty"` diff --git a/typedapi/types/mlinferencetrainedmodelscount.go b/typedapi/types/mlinferencetrainedmodelscount.go old mode 100755 new mode 100644 index 8cfae622d2..a9f3b0a3e6 --- a/typedapi/types/mlinferencetrainedmodelscount.go +++ b/typedapi/types/mlinferencetrainedmodelscount.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlInferenceTrainedModelsCount type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L238-L247 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L238-L247 type MlInferenceTrainedModelsCount struct { Classification *int64 `json:"classification,omitempty"` Ner *int64 `json:"ner,omitempty"` @@ -34,6 +44,146 @@ type MlInferenceTrainedModelsCount struct { Total int64 `json:"total"` } +func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classification": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Classification = &value + case float64: + f := int64(v) + s.Classification = &f + } + + case "ner": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Ner = &value + case float64: + f := int64(v) + s.Ner = &f + } + + case "other": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Other = value + case float64: + f := int64(v) + s.Other = f + } + + case "pass_through": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PassThrough = &value + case float64: + f := int64(v) + s.PassThrough = &f + } + + case "prepackaged": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Prepackaged = value + case float64: + f := int64(v) + s.Prepackaged = f + } + + case "regression": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Regression = &value + case float64: + f := int64(v) + s.Regression = &f + } + + case "text_embedding": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TextEmbedding = &value + case float64: + f := int64(v) + s.TextEmbedding = &f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewMlInferenceTrainedModelsCount returns a MlInferenceTrainedModelsCount. func NewMlInferenceTrainedModelsCount() *MlInferenceTrainedModelsCount { r := &MlInferenceTrainedModelsCount{} diff --git a/typedapi/types/mljobforecasts.go b/typedapi/types/mljobforecasts.go old mode 100755 new mode 100644 index e6e9ec267d..1a9730c335 --- a/typedapi/types/mljobforecasts.go +++ b/typedapi/types/mljobforecasts.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MlJobForecasts type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L172-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L172-L175 type MlJobForecasts struct { ForecastedJobs int64 `json:"forecasted_jobs"` Total int64 `json:"total"` } +func (s *MlJobForecasts) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "forecasted_jobs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ForecastedJobs = value + case float64: + f := int64(v) + s.ForecastedJobs = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewMlJobForecasts returns a MlJobForecasts. func NewMlJobForecasts() *MlJobForecasts { r := &MlJobForecasts{} diff --git a/typedapi/types/modelplotconfig.go b/typedapi/types/modelplotconfig.go old mode 100755 new mode 100644 index 77f9e41c21..0d823994cf --- a/typedapi/types/modelplotconfig.go +++ b/typedapi/types/modelplotconfig.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ModelPlotConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/ModelPlot.ts#L23-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/ModelPlot.ts#L23-L40 type ModelPlotConfig struct { // AnnotationsEnabled If true, enables calculation and storage of the model change annotations for // each entity that is being analyzed. 
@@ -37,6 +47,59 @@ type ModelPlotConfig struct { Terms *string `json:"terms,omitempty"` } +func (s *ModelPlotConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "annotations_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AnnotationsEnabled = &value + case bool: + s.AnnotationsEnabled = &v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v + } + + case "terms": + if err := dec.Decode(&s.Terms); err != nil { + return err + } + + } + } + return nil +} + // NewModelPlotConfig returns a ModelPlotConfig. func NewModelPlotConfig() *ModelPlotConfig { r := &ModelPlotConfig{} diff --git a/typedapi/types/modelsizestats.go b/typedapi/types/modelsizestats.go old mode 100755 new mode 100644 index 36b107df44..81d2d97265 --- a/typedapi/types/modelsizestats.go +++ b/typedapi/types/modelsizestats.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/categorizationstatus" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/memorystatus" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ModelSizeStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Model.ts#L56-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Model.ts#L56-L78 type ModelSizeStats struct { AssignmentMemoryBasis *string `json:"assignment_memory_basis,omitempty"` BucketAllocationFailuresCount int64 `json:"bucket_allocation_failures_count"` @@ -52,6 +60,253 @@ type ModelSizeStats struct { TotalPartitionFieldCount int64 `json:"total_partition_field_count"` } +func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_memory_basis": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentMemoryBasis = &o + + case "bucket_allocation_failures_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BucketAllocationFailuresCount = value + case float64: + f := int64(v) + s.BucketAllocationFailuresCount = f + } + + case "categorization_status": + if err := dec.Decode(&s.CategorizationStatus); err != nil { + return err + } + + case "categorized_doc_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CategorizedDocCount = value + case float64: + f := int(v) + s.CategorizedDocCount = f + } + + case "dead_category_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DeadCategoryCount = value + case float64: + f := int(v) + s.DeadCategoryCount = f + } + + case "failed_category_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FailedCategoryCount = value + case float64: + f := int(v) + s.FailedCategoryCount = f + } + + case "frequent_category_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FrequentCategoryCount = value + case float64: + f := int(v) + s.FrequentCategoryCount = f + } + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "log_time": + if err := dec.Decode(&s.LogTime); err != nil { + return err + } + + case "memory_status": + if err := dec.Decode(&s.MemoryStatus); err != nil { + return err + } + + case "model_bytes": + if err := dec.Decode(&s.ModelBytes); err != nil { + return err + } + + case "model_bytes_exceeded": + if err := dec.Decode(&s.ModelBytesExceeded); err != nil { + return err + } + + case "model_bytes_memory_limit": + if err := dec.Decode(&s.ModelBytesMemoryLimit); err != nil { + return err + } + + case "peak_model_bytes": + if err := dec.Decode(&s.PeakModelBytes); err != nil { + return err + } + + case "rare_category_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RareCategoryCount = value + case float64: + f := int(v) + s.RareCategoryCount = f + } + + case "result_type": + var 
tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = &value + case float64: + f := int64(v) + s.Timestamp = &f + } + + case "total_by_field_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalByFieldCount = value + case float64: + f := int64(v) + s.TotalByFieldCount = f + } + + case "total_category_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalCategoryCount = value + case float64: + f := int(v) + s.TotalCategoryCount = f + } + + case "total_over_field_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalOverFieldCount = value + case float64: + f := int64(v) + s.TotalOverFieldCount = f + } + + case "total_partition_field_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalPartitionFieldCount = value + case float64: + f := int64(v) + s.TotalPartitionFieldCount = f + } + + } + } + return nil +} + // NewModelSizeStats returns a ModelSizeStats. func NewModelSizeStats() *ModelSizeStats { r := &ModelSizeStats{} diff --git a/typedapi/types/modelsnapshot.go b/typedapi/types/modelsnapshot.go old mode 100755 new mode 100644 index ad4c16cd93..44387f32a7 --- a/typedapi/types/modelsnapshot.go +++ b/typedapi/types/modelsnapshot.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ModelSnapshot type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Model.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Model.ts#L25-L46 type ModelSnapshot struct { // Description An optional description of the job. 
Description *string `json:"description,omitempty"` @@ -49,6 +59,130 @@ type ModelSnapshot struct { Timestamp int64 `json:"timestamp"` } +func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "latest_record_time_stamp": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LatestRecordTimeStamp = &value + case float64: + f := int(v) + s.LatestRecordTimeStamp = &f + } + + case "latest_result_time_stamp": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LatestResultTimeStamp = &value + case float64: + f := int(v) + s.LatestResultTimeStamp = &f + } + + case "min_version": + if err := dec.Decode(&s.MinVersion); err != nil { + return err + } + + case "model_size_stats": + if err := dec.Decode(&s.ModelSizeStats); err != nil { + return err + } + + case "retain": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Retain = value + case bool: + s.Retain = v + } + + case "snapshot_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SnapshotDocCount = value + case float64: + f := int64(v) + s.SnapshotDocCount = f + } + + case "snapshot_id": + if err := dec.Decode(&s.SnapshotId); err != nil { + return err + } + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = value + case float64: + f := int64(v) + s.Timestamp = f + } + + } + } + return nil +} + // NewModelSnapshot returns a ModelSnapshot. func NewModelSnapshot() *ModelSnapshot { r := &ModelSnapshot{} diff --git a/typedapi/types/modelsnapshotupgrade.go b/typedapi/types/modelsnapshotupgrade.go old mode 100755 new mode 100644 index d4616c977f..9d90670069 --- a/typedapi/types/modelsnapshotupgrade.go +++ b/typedapi/types/modelsnapshotupgrade.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/snapshotupgradestate" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ModelSnapshotUpgrade type. 
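// Editor's note: another illustrative sketch, not part of the generated diff. The new
// ModelSnapshot decoder above tolerates numeric and boolean fields that may be
// serialized as strings. Assumes the same generated types package as in the sketch above.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"snapshot_id":"snap-1","timestamp":"1672531200000","retain":"true","snapshot_doc_count":42}`)

	var snap types.ModelSnapshot
	if err := json.Unmarshal(payload, &snap); err != nil {
		panic(err)
	}
	// timestamp and retain were strings in the payload; both are parsed via strconv.
	fmt.Println(snap.Timestamp, snap.Retain, snap.SnapshotDocCount) // 1672531200000 true 42
}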
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Model.ts#L48-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Model.ts#L48-L54 type ModelSnapshotUpgrade struct { AssignmentExplanation string `json:"assignment_explanation"` JobId string `json:"job_id"` @@ -35,6 +41,54 @@ type ModelSnapshotUpgrade struct { State snapshotupgradestate.SnapshotUpgradeState `json:"state"` } +func (s *ModelSnapshotUpgrade) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_explanation": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AssignmentExplanation = o + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "snapshot_id": + if err := dec.Decode(&s.SnapshotId); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + } + } + return nil +} + // NewModelSnapshotUpgrade returns a ModelSnapshotUpgrade. func NewModelSnapshotUpgrade() *ModelSnapshotUpgrade { r := &ModelSnapshotUpgrade{} diff --git a/typedapi/types/monitoring.go b/typedapi/types/monitoring.go old mode 100755 new mode 100644 index 5d5d7e9263..267d76e64f --- a/typedapi/types/monitoring.go +++ b/typedapi/types/monitoring.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Monitoring type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L372-L375 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L372-L375 type Monitoring struct { Available bool `json:"available"` CollectionEnabled bool `json:"collection_enabled"` @@ -30,6 +40,76 @@ type Monitoring struct { EnabledExporters map[string]int64 `json:"enabled_exporters"` } +func (s *Monitoring) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "collection_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CollectionEnabled = value + case bool: + s.CollectionEnabled = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "enabled_exporters": + if s.EnabledExporters == nil { + s.EnabledExporters = make(map[string]int64, 0) + } + if err := dec.Decode(&s.EnabledExporters); err != nil { + return err + } + + } + } + return nil +} + // NewMonitoring returns a Monitoring. func NewMonitoring() *Monitoring { r := &Monitoring{ diff --git a/typedapi/types/morelikethisquery.go b/typedapi/types/morelikethisquery.go old mode 100755 new mode 100644 index b2bd6257b3..87c21a1076 --- a/typedapi/types/morelikethisquery.go +++ b/typedapi/types/morelikethisquery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MoreLikeThisQuery type. 
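// Editor's note: a small sketch, not part of the diff, for the Monitoring decoder
// above. Boolean flags may arrive natively or as strings, and enabled_exporters is
// allocated lazily before being decoded into a map[string]int64.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"available":"true","collection_enabled":false,"enabled":true,"enabled_exporters":{"local":1}}`)

	var m types.Monitoring
	if err := json.Unmarshal(payload, &m); err != nil {
		panic(err)
	}
	fmt.Println(m.Available, m.CollectionEnabled, m.EnabledExporters["local"]) // true false 1
}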
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L62-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L62-L89 type MoreLikeThisQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -51,6 +59,279 @@ type MoreLikeThisQuery struct { VersionType *versiontype.VersionType `json:"version_type,omitempty"` } +func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "boost_terms": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.BoostTerms = &f + case float64: + f := Float64(v) + s.BoostTerms = &f + } + + case "fail_on_unsupported_field": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FailOnUnsupportedField = &value + case bool: + s.FailOnUnsupportedField = &v + } + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "include": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Include = &value + case bool: + s.Include = &v + } + + case "like": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(Like) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Like = append(s.Like, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Like); err != nil { + return err + } + } + + case "max_doc_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDocFreq = &value + case float64: + f := int(v) + s.MaxDocFreq = &f + } + + case "max_query_terms": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxQueryTerms = &value + case float64: + f := int(v) + s.MaxQueryTerms = &f + } + + case "max_word_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxWordLength = &value + case float64: + f := int(v) + s.MaxWordLength = &f + } + + case "min_doc_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinDocFreq = &value + case float64: + f := int(v) + s.MinDocFreq = &f + } + + case "min_term_freq": + + var tmp interface{} + 
dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinTermFreq = &value + case float64: + f := int(v) + s.MinTermFreq = &f + } + + case "min_word_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinWordLength = &value + case float64: + f := int(v) + s.MinWordLength = &f + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "per_field_analyzer": + if s.PerFieldAnalyzer == nil { + s.PerFieldAnalyzer = make(map[string]string, 0) + } + if err := dec.Decode(&s.PerFieldAnalyzer); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "stop_words": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.StopWords = append(s.StopWords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StopWords); err != nil { + return err + } + } + + case "unlike": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(Like) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Unlike = append(s.Unlike, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Unlike); err != nil { + return err + } + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "version_type": + if err := dec.Decode(&s.VersionType); err != nil { + return err + } + + } + } + return nil +} + // NewMoreLikeThisQuery returns a MoreLikeThisQuery. func NewMoreLikeThisQuery() *MoreLikeThisQuery { r := &MoreLikeThisQuery{ diff --git a/typedapi/types/mountedsnapshot.go b/typedapi/types/mountedsnapshot.go old mode 100755 new mode 100644 index a443ab2269..7586179777 --- a/typedapi/types/mountedsnapshot.go +++ b/typedapi/types/mountedsnapshot.go @@ -16,19 +16,73 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MountedSnapshot type. 
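// Editor's note: illustrative only, not part of the diff. The MoreLikeThisQuery
// decoder above normalizes the "like" clause: a single value is wrapped into a
// one-element slice, while an array is decoded as-is. Integer knobs such as
// min_term_freq are likewise accepted as strings or numbers.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"like":"distributed search","min_term_freq":"2"}`)

	var q types.MoreLikeThisQuery
	if err := json.Unmarshal(payload, &q); err != nil {
		panic(err)
	}
	// "like" was a bare string, so the decoder appended it as a single Like entry.
	fmt.Println(len(q.Like), *q.MinTermFreq) // 1 2
}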
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/mount/types.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/mount/types.ts#L23-L27 type MountedSnapshot struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` Snapshot string `json:"snapshot"` } +func (s *MountedSnapshot) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "shards": + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "snapshot": + if err := dec.Decode(&s.Snapshot); err != nil { + return err + } + + } + } + return nil +} + // NewMountedSnapshot returns a MountedSnapshot. func NewMountedSnapshot() *MountedSnapshot { r := &MountedSnapshot{} diff --git a/typedapi/types/movingaverageaggregation.go b/typedapi/types/movingaverageaggregation.go old mode 100755 new mode 100644 index 51c6aff928..5abe091d76 --- a/typedapi/types/movingaverageaggregation.go +++ b/typedapi/types/movingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,5 +28,5 @@ package types // HoltMovingAverageAggregation // HoltWintersMovingAverageAggregation // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L188-L194 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L188-L194 type MovingAverageAggregation interface{} diff --git a/typedapi/types/movingfunctionaggregation.go b/typedapi/types/movingfunctionaggregation.go old mode 100755 new mode 100644 index 4b748d338e..49648dad73 --- a/typedapi/types/movingfunctionaggregation.go +++ b/typedapi/types/movingfunctionaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,25 +27,28 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // MovingFunctionAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L250-L254 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L250-L254 type MovingFunctionAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Script *string `json:"script,omitempty"` - Shift *int `json:"shift,omitempty"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Script *string `json:"script,omitempty"` + Shift *int `json:"shift,omitempty"` + Window *int `json:"window,omitempty"` } func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,9 +68,12 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -80,23 +86,51 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "script": - if err := dec.Decode(&s.Script); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Script = &o case "shift": - if err := dec.Decode(&s.Shift); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shift = &value + case float64: + f := int(v) + s.Shift = &f } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/movingpercentilesaggregation.go b/typedapi/types/movingpercentilesaggregation.go old mode 100755 new mode 100644 index 6557b09645..7483f805cd --- a/typedapi/types/movingpercentilesaggregation.go +++ b/typedapi/types/movingpercentilesaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,25 +27,28 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // MovingPercentilesAggregation type. 
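// Editor's note: a sketch, not part of the diff, for the MovingFunctionAggregation
// changes above: shift and window now go through strconv when they arrive as strings,
// and meta is typed as Metadata instead of map[string]json.RawMessage.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"buckets_path":"the_sum","window":"30","shift":5}`)

	var agg types.MovingFunctionAggregation
	if err := json.Unmarshal(payload, &agg); err != nil {
		panic(err)
	}
	fmt.Println(*agg.Window, *agg.Shift) // 30 5
}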
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L256-L260 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L256-L260 type MovingPercentilesAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Shift *int `json:"shift,omitempty"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Shift *int `json:"shift,omitempty"` + Window *int `json:"window,omitempty"` } func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,9 +68,12 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -75,8 +81,17 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - if err := dec.Decode(&s.Keyed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v } case "meta": @@ -85,18 +100,43 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "shift": - if err := dec.Decode(&s.Shift); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shift = &value + case float64: + f := int(v) + s.Shift = &f } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/mtermvectorsoperation.go b/typedapi/types/mtermvectorsoperation.go old mode 100755 new mode 100644 index 57f669a8a0..6841f30c69 --- a/typedapi/types/mtermvectorsoperation.go +++ b/typedapi/types/mtermvectorsoperation.go @@ -16,19 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MTermVectorsOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mtermvectors/types.ts#L35-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mtermvectors/types.ts#L35-L49 type MTermVectorsOperation struct { Doc json.RawMessage `json:"doc,omitempty"` FieldStatistics *bool `json:"field_statistics,omitempty"` @@ -45,6 +51,147 @@ type MTermVectorsOperation struct { VersionType *versiontype.VersionType `json:"version_type,omitempty"` } +func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc": + if err := dec.Decode(&s.Doc); err != nil { + return err + } + + case "field_statistics": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FieldStatistics = &value + case bool: + s.FieldStatistics = &v + } + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "offsets": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Offsets = &value + case bool: + s.Offsets = &v + } + + case "payloads": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Payloads = &value + case bool: + s.Payloads = &v + } + + case "positions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Positions = &value + case bool: + s.Positions = &v + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "term_statistics": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TermStatistics = &value + case bool: + s.TermStatistics = &v + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "version_type": + if err := dec.Decode(&s.VersionType); err != nil { + return err + } + + } + } + return nil +} + // NewMTermVectorsOperation returns a MTermVectorsOperation. 
func NewMTermVectorsOperation() *MTermVectorsOperation { r := &MTermVectorsOperation{} diff --git a/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go b/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go old mode 100755 new mode 100644 index 9e025b46c0..ee52777636 --- a/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go +++ b/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseAdjacencyMatrixBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseAdjacencyMatrixBucket struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseAdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseAdjacencyMatrixBucket) UnmarshalJSON(data []byt source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]AdjacencyMatrixBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []AdjacencyMatrixBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasecompositebucket.go b/typedapi/types/multibucketaggregatebasecompositebucket.go old mode 100755 new mode 100644 index 0cc49ebd62..25f9a42dbb --- a/typedapi/types/multibucketaggregatebasecompositebucket.go +++ b/typedapi/types/multibucketaggregatebasecompositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseCompositeBucket type. 
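// Editor's note: not part of the diff. The MultiBucketAggregateBase* decoders above
// and below keep accepting the two shapes Elasticsearch can return for "buckets":
// a keyed object or an array; the localDec.Decode errors inside that switch are now
// propagated instead of silently dropped. This sketch assumes the generated
// BucketsAdjacencyMatrixBucket union is an interface{}-style type, as the assignments
// in the decoder suggest.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	for _, payload := range []string{`{"buckets":[]}`, `{"buckets":{}}`} {
		var agg types.MultiBucketAggregateBaseAdjacencyMatrixBucket
		if err := json.Unmarshal([]byte(payload), &agg); err != nil {
			panic(err)
		}
		// A type switch reveals which of the two shapes was received.
		switch agg.Buckets.(type) {
		case []types.AdjacencyMatrixBucket:
			fmt.Println("array form")
		case map[string]types.AdjacencyMatrixBucket:
			fmt.Println("keyed form")
		}
	}
}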
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseCompositeBucket struct { - Buckets BucketsCompositeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsCompositeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseCompositeBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseCompositeBucket) UnmarshalJSON(data []byte) err source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]CompositeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []CompositeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasedatehistogrambucket.go b/typedapi/types/multibucketaggregatebasedatehistogrambucket.go old mode 100755 new mode 100644 index 943c604878..7498c9c73c --- a/typedapi/types/multibucketaggregatebasedatehistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasedatehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseDateHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseDateHistogramBucket struct { Buckets BucketsDateHistogramBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseDateHistogramBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseDateHistogramBucket) UnmarshalJSON(data []byte) source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]DateHistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []DateHistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasedoubletermsbucket.go b/typedapi/types/multibucketaggregatebasedoubletermsbucket.go old mode 100755 new mode 100644 index 8041a5f218..e59eb6c309 --- a/typedapi/types/multibucketaggregatebasedoubletermsbucket.go +++ b/typedapi/types/multibucketaggregatebasedoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseDoubleTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseDoubleTermsBucket struct { - Buckets BucketsDoubleTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsDoubleTermsBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) e source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]DoubleTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []DoubleTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasefiltersbucket.go b/typedapi/types/multibucketaggregatebasefiltersbucket.go old mode 100755 new mode 100644 index 13f2bd275a..83b0227aea --- a/typedapi/types/multibucketaggregatebasefiltersbucket.go +++ b/typedapi/types/multibucketaggregatebasefiltersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseFiltersBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseFiltersBucket struct { - Buckets BucketsFiltersBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsFiltersBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseFiltersBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseFiltersBucket) UnmarshalJSON(data []byte) error source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]FiltersBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []FiltersBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go b/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go new file mode 100644 index 0000000000..99ec48a72d --- /dev/null +++ b/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go @@ -0,0 +1,90 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + +// MultiBucketAggregateBaseFrequentItemSetsBucket type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 +type MultiBucketAggregateBaseFrequentItemSetsBucket struct { + Buckets BucketsFrequentItemSetsBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` +} + +func (s *MultiBucketAggregateBaseFrequentItemSetsBucket) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buckets": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(map[string]FrequentItemSetsBucket, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Buckets = o + case '[': + o := []FrequentItemSetsBucket{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Buckets = o + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + } + } + return nil +} + +// NewMultiBucketAggregateBaseFrequentItemSetsBucket returns a MultiBucketAggregateBaseFrequentItemSetsBucket. +func NewMultiBucketAggregateBaseFrequentItemSetsBucket() *MultiBucketAggregateBaseFrequentItemSetsBucket { + r := &MultiBucketAggregateBaseFrequentItemSetsBucket{} + + return r +} diff --git a/typedapi/types/multibucketaggregatebasegeohashgridbucket.go b/typedapi/types/multibucketaggregatebasegeohashgridbucket.go old mode 100755 new mode 100644 index 5ad3099dfb..f6c38f3fec --- a/typedapi/types/multibucketaggregatebasegeohashgridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseGeoHashGridBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoHashGridBucket struct { - Buckets BucketsGeoHashGridBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsGeoHashGridBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseGeoHashGridBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseGeoHashGridBucket) UnmarshalJSON(data []byte) e source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]GeoHashGridBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []GeoHashGridBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasegeohexgridbucket.go b/typedapi/types/multibucketaggregatebasegeohexgridbucket.go old mode 100755 new mode 100644 index 9fe235d483..17943a06f2 --- a/typedapi/types/multibucketaggregatebasegeohexgridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseGeoHexGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoHexGridBucket struct { - Buckets BucketsGeoHexGridBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsGeoHexGridBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseGeoHexGridBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseGeoHexGridBucket) UnmarshalJSON(data []byte) er source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]GeoHexGridBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []GeoHexGridBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasegeotilegridbucket.go b/typedapi/types/multibucketaggregatebasegeotilegridbucket.go old mode 100755 new mode 100644 index bb43d6e850..5039809b8a --- a/typedapi/types/multibucketaggregatebasegeotilegridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseGeoTileGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoTileGridBucket struct { - Buckets BucketsGeoTileGridBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsGeoTileGridBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseGeoTileGridBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseGeoTileGridBucket) UnmarshalJSON(data []byte) e source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]GeoTileGridBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []GeoTileGridBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasehistogrambucket.go b/typedapi/types/multibucketaggregatebasehistogrambucket.go old mode 100755 new mode 100644 index 9f43b59d22..d8d23b75f5 --- a/typedapi/types/multibucketaggregatebasehistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseHistogramBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseHistogramBucket struct { - Buckets BucketsHistogramBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsHistogramBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseHistogramBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseHistogramBucket) UnmarshalJSON(data []byte) err source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]HistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []HistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebaseipprefixbucket.go b/typedapi/types/multibucketaggregatebaseipprefixbucket.go old mode 100755 new mode 100644 index ff5253c539..69ca33b65e --- a/typedapi/types/multibucketaggregatebaseipprefixbucket.go +++ b/typedapi/types/multibucketaggregatebaseipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseIpPrefixBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseIpPrefixBucket struct { - Buckets BucketsIpPrefixBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsIpPrefixBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseIpPrefixBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseIpPrefixBucket) UnmarshalJSON(data []byte) erro source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]IpPrefixBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []IpPrefixBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebaseiprangebucket.go b/typedapi/types/multibucketaggregatebaseiprangebucket.go old mode 100755 new mode 100644 index fb30bd0ba4..72b3d34e63 --- a/typedapi/types/multibucketaggregatebaseiprangebucket.go +++ b/typedapi/types/multibucketaggregatebaseiprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseIpRangeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseIpRangeBucket struct { - Buckets BucketsIpRangeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsIpRangeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseIpRangeBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseIpRangeBucket) UnmarshalJSON(data []byte) error source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]IpRangeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []IpRangeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebaselongraretermsbucket.go b/typedapi/types/multibucketaggregatebaselongraretermsbucket.go old mode 100755 new mode 100644 index 3704992db9..3636ef6297 --- a/typedapi/types/multibucketaggregatebaselongraretermsbucket.go +++ b/typedapi/types/multibucketaggregatebaselongraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseLongRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseLongRareTermsBucket struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseLongRareTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseLongRareTermsBucket) UnmarshalJSON(data []byte) source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]LongRareTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []LongRareTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebaselongtermsbucket.go b/typedapi/types/multibucketaggregatebaselongtermsbucket.go old mode 100755 new mode 100644 index 0fd6c69a1e..4b8c1b9b43 --- a/typedapi/types/multibucketaggregatebaselongtermsbucket.go +++ b/typedapi/types/multibucketaggregatebaselongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseLongTermsBucket struct { - Buckets BucketsLongTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsLongTermsBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) err source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]LongTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []LongTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasemultitermsbucket.go b/typedapi/types/multibucketaggregatebasemultitermsbucket.go old mode 100755 new mode 100644 index ab997c871c..bacdce04f6 --- a/typedapi/types/multibucketaggregatebasemultitermsbucket.go +++ b/typedapi/types/multibucketaggregatebasemultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseMultiTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseMultiTermsBucket struct { - Buckets BucketsMultiTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsMultiTermsBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) er source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]MultiTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []MultiTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebaserangebucket.go b/typedapi/types/multibucketaggregatebaserangebucket.go old mode 100755 new mode 100644 index 77738d4179..93073dabfd --- a/typedapi/types/multibucketaggregatebaserangebucket.go +++ b/typedapi/types/multibucketaggregatebaserangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseRangeBucket type. 
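Reviewer note: the bucket-aggregate hunks above all regenerate the same way. Meta switches from map[string]json.RawMessage to the shared Metadata alias, and the buckets payload is still decoded by peeking at its first byte ('{' for a keyed map, '[' for an ordered slice), except that decode errors are now returned instead of being silently dropped. The following self-contained sketch mirrors that object-or-array pattern; the bucket type and decodeBuckets helper are illustrative stand-ins, not part of the generated API.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// bucket stands in for the generated *Bucket types; only the pattern matters here.
type bucket struct {
	DocCount int64 `json:"doc_count"`
}

// decodeBuckets mirrors the generated switch on rawMsg[0]: '{' means keyed
// buckets, '[' means an ordered list; decode errors now propagate to the caller.
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	dec := json.NewDecoder(bytes.NewReader(raw))
	switch raw[0] {
	case '{':
		keyed := make(map[string]bucket)
		if err := dec.Decode(&keyed); err != nil {
			return nil, err
		}
		return keyed, nil
	case '[':
		var listed []bucket
		if err := dec.Decode(&listed); err != nil {
			return nil, err
		}
		return listed, nil
	default:
		return nil, fmt.Errorf("unexpected buckets payload: %s", raw)
	}
}

func main() {
	keyed, err := decodeBuckets(json.RawMessage(`{"a":{"doc_count":1}}`))
	fmt.Println(keyed, err) // map[a:{1}] <nil>
	listed, err := decodeBuckets(json.RawMessage(`[{"doc_count":2}]`))
	fmt.Println(listed, err) // [{2}] <nil>
}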
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseRangeBucket struct { - Buckets BucketsRangeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsRangeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseRangeBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseRangeBucket) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]RangeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []RangeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go b/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go old mode 100755 new mode 100644 index aa5c31ba54..12b586ef44 --- a/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseSignificantLongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseSignificantLongTermsBucket struct { Buckets BucketsSignificantLongTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseSignificantLongTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseSignificantLongTermsBucket) UnmarshalJSON(data source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]SignificantLongTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []SignificantLongTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go b/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go old mode 100755 new mode 100644 index fbbd13c719..66a9650118 --- a/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseSignificantStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseSignificantStringTermsBucket struct { Buckets BucketsSignificantStringTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseSignificantStringTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseSignificantStringTermsBucket) UnmarshalJSON(dat source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]SignificantStringTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []SignificantStringTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasestringraretermsbucket.go b/typedapi/types/multibucketaggregatebasestringraretermsbucket.go old mode 100755 new mode 100644 index 6bd75fcc62..6d8854c739 --- a/typedapi/types/multibucketaggregatebasestringraretermsbucket.go +++ b/typedapi/types/multibucketaggregatebasestringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseStringRareTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseStringRareTermsBucket struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseStringRareTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseStringRareTermsBucket) UnmarshalJSON(data []byt source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]StringRareTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []StringRareTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasestringtermsbucket.go b/typedapi/types/multibucketaggregatebasestringtermsbucket.go old mode 100755 new mode 100644 index e5e0374239..b99483dfc2 --- a/typedapi/types/multibucketaggregatebasestringtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasestringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseStringTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseStringTermsBucket struct { - Buckets BucketsStringTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsStringTermsBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) e source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]StringTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []StringTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go b/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go old mode 100755 new mode 100644 index f2fe053876..c7ca9a3c48 --- a/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseVariableWidthHistogramBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseVariableWidthHistogramBucket struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseVariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseVariableWidthHistogramBucket) UnmarshalJSON(dat source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]VariableWidthHistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []VariableWidthHistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multibucketaggregatebasevoid.go b/typedapi/types/multibucketaggregatebasevoid.go old mode 100755 new mode 100644 index 4f4a34268f..0bba33a38a --- a/typedapi/types/multibucketaggregatebasevoid.go +++ b/typedapi/types/multibucketaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // MultiBucketAggregateBaseVoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseVoid struct { - Buckets BucketsVoid `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsVoid `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *MultiBucketAggregateBaseVoid) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *MultiBucketAggregateBaseVoid) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': - o := make(map[string]struct{}, 0) - localDec.Decode(&o) + o := make(map[string]interface{}, 0) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': - o := []struct{}{} - localDec.Decode(&o) + o := []interface{}{} + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/multigeterror.go b/typedapi/types/multigeterror.go old mode 100755 new mode 100644 index f9e1da5023..0c01c7a8c7 --- a/typedapi/types/multigeterror.go +++ b/typedapi/types/multigeterror.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MultiGetError type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mget/types.ts#L62-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mget/types.ts#L62-L66 type MultiGetError struct { Error ErrorCause `json:"error"` Id_ string `json:"_id"` Index_ string `json:"_index"` } +func (s *MultiGetError) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "error": + if err := dec.Decode(&s.Error); err != nil { + return err + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + } + } + return nil +} + // NewMultiGetError returns a MultiGetError. func NewMultiGetError() *MultiGetError { r := &MultiGetError{} diff --git a/typedapi/types/multimatchquery.go b/typedapi/types/multimatchquery.go old mode 100755 new mode 100644 index 50c5a5ec44..146df1b1e3 --- a/typedapi/types/multimatchquery.go +++ b/typedapi/types/multimatchquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,11 +24,19 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/operator" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/textquerytype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/zerotermsquery" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // MultiMatchQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L191-L217 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L191-L217 type MultiMatchQuery struct { Analyzer *string `json:"analyzer,omitempty"` AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` @@ -51,6 +59,234 @@ type MultiMatchQuery struct { ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` } +func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "auto_generate_synonyms_phrase_query": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AutoGenerateSynonymsPhraseQuery = &value + case bool: + s.AutoGenerateSynonymsPhraseQuery = &v + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "cutoff_frequency": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.CutoffFrequency = &f + case float64: + f := Float64(v) + s.CutoffFrequency = &f + } + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "fuzzy_rewrite": + if err := dec.Decode(&s.FuzzyRewrite); err != nil { + return err + } + + case "fuzzy_transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FuzzyTranspositions = &value + case bool: + s.FuzzyTranspositions = &v + } + + case "lenient": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lenient = &value + case bool: + s.Lenient = &v + } + + case "max_expansions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxExpansions = &value + case float64: + f := int(v) + s.MaxExpansions = &f + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "operator": + if err := dec.Decode(&s.Operator); err != nil { + return err + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if 
err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "slop": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Slop = &value + case float64: + f := int(v) + s.Slop = &f + } + + case "tie_breaker": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.TieBreaker = &f + case float64: + f := Float64(v) + s.TieBreaker = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "zero_terms_query": + if err := dec.Decode(&s.ZeroTermsQuery); err != nil { + return err + } + + } + } + return nil +} + // NewMultiMatchQuery returns a MultiMatchQuery. func NewMultiMatchQuery() *MultiMatchQuery { r := &MultiMatchQuery{} diff --git a/typedapi/types/multiplexertokenfilter.go b/typedapi/types/multiplexertokenfilter.go old mode 100755 new mode 100644 index 1fcf49a562..f4a48da529 --- a/typedapi/types/multiplexertokenfilter.go +++ b/typedapi/types/multiplexertokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // MultiplexerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L259-L263 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L260-L264 type MultiplexerTokenFilter struct { Filters []string `json:"filters"` PreserveOriginal *bool `json:"preserve_original,omitempty"` @@ -30,6 +40,55 @@ type MultiplexerTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *MultiplexerTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filters": + if err := dec.Decode(&s.Filters); err != nil { + return err + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewMultiplexerTokenFilter returns a MultiplexerTokenFilter. 
func NewMultiplexerTokenFilter() *MultiplexerTokenFilter { r := &MultiplexerTokenFilter{} diff --git a/typedapi/types/multitermlookup.go b/typedapi/types/multitermlookup.go old mode 100755 new mode 100644 index a23c1adc15..d576ff8458 --- a/typedapi/types/multitermlookup.go +++ b/typedapi/types/multitermlookup.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // MultiTermLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L276-L279 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L276-L279 type MultiTermLookup struct { Field string `json:"field"` Missing Missing `json:"missing,omitempty"` } +func (s *MultiTermLookup) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + } + } + return nil +} + // NewMultiTermLookup returns a MultiTermLookup. func NewMultiTermLookup() *MultiTermLookup { r := &MultiTermLookup{} diff --git a/typedapi/types/multitermsaggregate.go b/typedapi/types/multitermsaggregate.go old mode 100755 new mode 100644 index e8c9432e4b..ef61bec57a --- a/typedapi/types/multitermsaggregate.go +++ b/typedapi/types/multitermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // MultiTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L460-L462 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L461-L463 type MultiTermsAggregate struct { - Buckets BucketsMultiTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsMultiTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]MultiTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []MultiTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/multitermsaggregation.go b/typedapi/types/multitermsaggregation.go old mode 100755 new mode 100644 index baca0bddb8..706833c00c --- a/typedapi/types/multitermsaggregation.go +++ b/typedapi/types/multitermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,15 +28,17 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // MultiTermsAggregation type. 
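Reviewer note: the hunks above also make scalar fields tolerant of string-encoded values. Fields such as boost, max_expansions, doc_count_error_upper_bound and sum_other_doc_count are first decoded into an interface{} and then type-switched, parsing with strconv when the value arrives as a quoted string and converting from float64 when it arrives as a JSON number. Below is a minimal sketch of that lenient decode; the lenientInt64 helper is hypothetical and not part of the generated code.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// lenientInt64 accepts a value that may arrive either as a JSON number or as
// a quoted string, the same tolerance the generated UnmarshalJSON methods
// above apply to int64 fields such as doc_count_error_upper_bound.
func lenientInt64(raw json.RawMessage) (int64, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64:
		return int64(v), nil
	default:
		return 0, fmt.Errorf("unexpected type %T", v)
	}
}

func main() {
	a, _ := lenientInt64(json.RawMessage(`42`))
	b, _ := lenientInt64(json.RawMessage(`"42"`))
	fmt.Println(a, b) // 42 42
}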
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L265-L274 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L265-L274 type MultiTermsAggregation struct { CollectMode *termsaggregationcollectmode.TermsAggregationCollectMode `json:"collect_mode,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` Name *string `json:"name,omitempty"` Order AggregateOrder `json:"order,omitempty"` @@ -48,6 +50,7 @@ type MultiTermsAggregation struct { } func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -72,14 +75,27 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { } case "min_doc_count": - if err := dec.Decode(&s.MinDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int64(v) + s.MinDocCount = &f } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "order": @@ -88,36 +104,79 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o - case '[': o := make([]map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o } case "shard_min_doc_count": - if err := dec.Decode(&s.ShardMinDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShardMinDocCount = &value + case float64: + f := int64(v) + s.ShardMinDocCount = &f } case "shard_size": - if err := dec.Decode(&s.ShardSize); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f } case "show_term_doc_count_error": - if err := dec.Decode(&s.ShowTermDocCountError); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ShowTermDocCountError = &value + case bool: + s.ShowTermDocCountError = &v } case "size": - if err := dec.Decode(&s.Size); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f } case "terms": diff --git a/typedapi/types/multitermsbucket.go b/typedapi/types/multitermsbucket.go old mode 100755 new mode 100644 index 2b67d68ae8..f0ba9f7724 --- a/typedapi/types/multitermsbucket.go +++ b/typedapi/types/multitermsbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // MultiTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L464-L468 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L465-L469 type MultiTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -44,6 +46,7 @@ type MultiTermsBucket struct { } func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,456 +60,34 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - 
s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); 
err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "key": @@ -515,9 +96,525 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return 
err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := 
NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -543,6 +640,7 @@ func (s MultiTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/murmur3hashproperty.go b/typedapi/types/murmur3hashproperty.go old mode 100755 new mode 100644 index a9cea7caa3..8d4dc993bd --- a/typedapi/types/murmur3hashproperty.go +++ b/typedapi/types/murmur3hashproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // Murmur3HashProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L74-L76 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L74-L76 type Murmur3HashProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -48,6 +50,7 @@ type Murmur3HashProperty struct { } func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,13 +65,33 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -77,6 +100,9 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -364,23 +390,40 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -668,20 +711,32 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/mutualinformationheuristic.go b/typedapi/types/mutualinformationheuristic.go old mode 100755 new mode 100644 index 9cf3aceec0..f8d76c3491 --- a/typedapi/types/mutualinformationheuristic.go +++ 
b/typedapi/types/mutualinformationheuristic.go
@@ -16,18 +16,76 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
+import (
+	"bytes"
+	"errors"
+	"io"
+
+	"strconv"
+
+	"encoding/json"
+)
+
 // MutualInformationHeuristic type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L331-L334
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L331-L334
 type MutualInformationHeuristic struct {
 	BackgroundIsSuperset *bool `json:"background_is_superset,omitempty"`
 	IncludeNegatives     *bool `json:"include_negatives,omitempty"`
 }
+func (s *MutualInformationHeuristic) UnmarshalJSON(data []byte) error {
+
+	dec := json.NewDecoder(bytes.NewReader(data))
+
+	for {
+		t, err := dec.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			return err
+		}
+
+		switch t {
+
+		case "background_is_superset":
+			var tmp interface{}
+			dec.Decode(&tmp)
+			switch v := tmp.(type) {
+			case string:
+				value, err := strconv.ParseBool(v)
+				if err != nil {
+					return err
+				}
+				s.BackgroundIsSuperset = &value
+			case bool:
+				s.BackgroundIsSuperset = &v
+			}
+
+		case "include_negatives":
+			var tmp interface{}
+			dec.Decode(&tmp)
+			switch v := tmp.(type) {
+			case string:
+				value, err := strconv.ParseBool(v)
+				if err != nil {
+					return err
+				}
+				s.IncludeNegatives = &value
+			case bool:
+				s.IncludeNegatives = &v
+			}
+
+		}
+	}
+	return nil
+}
+
 // NewMutualInformationHeuristic returns a MutualInformationHeuristic.
 func NewMutualInformationHeuristic() *MutualInformationHeuristic {
 	r := &MutualInformationHeuristic{}
diff --git a/typedapi/types/names.go b/typedapi/types/names.go
old mode 100755
new mode 100644
index 4dc4a84342..ef5d91f19e
--- a/typedapi/types/names.go
+++ b/typedapi/types/names.go
@@ -16,11 +16,11 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
 // Names type alias.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L73-L73
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L73-L73
 type Names []string
diff --git a/typedapi/types/nativecode.go b/typedapi/types/nativecode.go
old mode 100755
new mode 100644
index c1be2a867d..1a28542a19
--- a/typedapi/types/nativecode.go
+++ b/typedapi/types/nativecode.go
@@ -16,18 +16,59 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
+import (
+	"bytes"
+	"errors"
+	"io"
+
+	"encoding/json"
+)
+
 // NativeCode type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/info/types.ts#L29-L32
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/info/types.ts#L29-L32
 type NativeCode struct {
 	BuildHash string `json:"build_hash"`
 	Version   string `json:"version"`
 }
+func (s *NativeCode) UnmarshalJSON(data []byte) error {
+
+	dec := json.NewDecoder(bytes.NewReader(data))
+
+	for {
+		t, err := dec.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			return err
+		}
+
+		switch t {
+
+		case "build_hash":
+			var tmp json.RawMessage
+			if err := dec.Decode(&tmp); err != nil {
+				return err
+			}
+			o := string(tmp)
+			s.BuildHash = o
+
+		case "version":
+			if err := dec.Decode(&s.Version); err != nil {
+				return err
+			}
+
+		}
+	}
+	return nil
+}
+
 // NewNativeCode returns a NativeCode.
 func NewNativeCode() *NativeCode {
 	r := &NativeCode{}
diff --git a/typedapi/types/nativecodeinformation.go b/typedapi/types/nativecodeinformation.go
old mode 100755
new mode 100644
index 741ad1d672..2495dc50c2
--- a/typedapi/types/nativecodeinformation.go
+++ b/typedapi/types/nativecodeinformation.go
@@ -16,18 +16,59 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
+import (
+	"bytes"
+	"errors"
+	"io"
+
+	"encoding/json"
+)
+
 // NativeCodeInformation type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/info/types.ts#L29-L32
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/info/types.ts#L29-L32
 type NativeCodeInformation struct {
 	BuildHash string `json:"build_hash"`
 	Version   string `json:"version"`
 }
+func (s *NativeCodeInformation) UnmarshalJSON(data []byte) error {
+
+	dec := json.NewDecoder(bytes.NewReader(data))
+
+	for {
+		t, err := dec.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			return err
+		}
+
+		switch t {
+
+		case "build_hash":
+			var tmp json.RawMessage
+			if err := dec.Decode(&tmp); err != nil {
+				return err
+			}
+			o := string(tmp)
+			s.BuildHash = o
+
+		case "version":
+			if err := dec.Decode(&s.Version); err != nil {
+				return err
+			}
+
+		}
+	}
+	return nil
+}
+
 // NewNativeCodeInformation returns a NativeCodeInformation.
 func NewNativeCodeInformation() *NativeCodeInformation {
 	r := &NativeCodeInformation{}
diff --git a/typedapi/types/nerinferenceoptions.go b/typedapi/types/nerinferenceoptions.go
old mode 100755
new mode 100644
index 713b2ed5a5..e6a5cfbfce
--- a/typedapi/types/nerinferenceoptions.go
+++ b/typedapi/types/nerinferenceoptions.go
@@ -16,13 +16,13 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
 // NerInferenceOptions type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L230-L239
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L230-L239
 type NerInferenceOptions struct {
 	// ClassificationLabels The token classification labels. Must be IOB formatted tags
 	ClassificationLabels []string `json:"classification_labels,omitempty"`
diff --git a/typedapi/types/nerinferenceupdateoptions.go b/typedapi/types/nerinferenceupdateoptions.go
old mode 100755
new mode 100644
index 420515f72b..97906675dd
--- a/typedapi/types/nerinferenceupdateoptions.go
+++ b/typedapi/types/nerinferenceupdateoptions.go
@@ -16,13 +16,13 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
 // NerInferenceUpdateOptions type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L363-L368
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L363-L368
 type NerInferenceUpdateOptions struct {
 	// ResultsField The field that is added to incoming documents to contain the inference
 	// prediction. Defaults to predicted_value.
diff --git a/typedapi/types/nestedaggregate.go b/typedapi/types/nestedaggregate.go
old mode 100755
new mode 100644
index 70923dad53..48d21083f8
--- a/typedapi/types/nestedaggregate.go
+++ b/typedapi/types/nestedaggregate.go
@@ -16,7 +16,7 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
@@ -29,19 +29,22 @@ import (
 	"strings"
+	"strconv"
+
 	"encoding/json"
 )
 // NestedAggregate type.
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L485-L486 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L486-L487 type NestedAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *NestedAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err 
:= dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := 
NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s NestedAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/nestedaggregation.go b/typedapi/types/nestedaggregation.go old mode 100755 new mode 100644 index 63c144103a..91ab5e8110 --- a/typedapi/types/nestedaggregation.go +++ b/typedapi/types/nestedaggregation.go @@ -16,21 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // NestedAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L281-L283 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L281-L283 type NestedAggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` +} + +func (s *NestedAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "path": + if err := dec.Decode(&s.Path); err != nil { + return err + } + + } + } + return nil } // NewNestedAggregation returns a NestedAggregation. diff --git a/typedapi/types/nestedidentity.go b/typedapi/types/nestedidentity.go old mode 100755 new mode 100644 index 0175280db7..179b1ebbd3 --- a/typedapi/types/nestedidentity.go +++ b/typedapi/types/nestedidentity.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NestedIdentity type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L88-L92 type NestedIdentity struct { Field string `json:"field"` Nested_ *NestedIdentity `json:"_nested,omitempty"` Offset int `json:"offset"` } +func (s *NestedIdentity) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "_nested": + if err := dec.Decode(&s.Nested_); err != nil { + return err + } + + case "offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Offset = value + case float64: + f := int(v) + s.Offset = f + } + + } + } + return nil +} + // NewNestedIdentity returns a NestedIdentity. func NewNestedIdentity() *NestedIdentity { r := &NestedIdentity{} diff --git a/typedapi/types/nestedproperty.go b/typedapi/types/nestedproperty.go old mode 100755 new mode 100644 index 2dee630385..17e9e0dfdc --- a/typedapi/types/nestedproperty.go +++ b/typedapi/types/nestedproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // NestedProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/complex.ts#L39-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/complex.ts#L39-L44 type NestedProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -50,6 +52,7 @@ type NestedProperty struct { } func (s *NestedProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,8 +67,19 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "dynamic": @@ -74,11 +88,23 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { } case "enabled": - if err := dec.Decode(&s.Enabled); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -366,33 +392,68 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "include_in_parent": - if err := dec.Decode(&s.IncludeInParent); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeInParent = &value + case bool: + s.IncludeInParent = &v } case "include_in_root": - if err := dec.Decode(&s.IncludeInRoot); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeInRoot = &value + case bool: + s.IncludeInRoot = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -680,20 +741,32 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err 
:= dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/nestedquery.go b/typedapi/types/nestedquery.go old mode 100755 new mode 100644 index 551656d2e9..96658aea99 --- a/typedapi/types/nestedquery.go +++ b/typedapi/types/nestedquery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/childscoremode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NestedQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/joining.ts#L63-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/joining.ts#L63-L71 type NestedQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` @@ -37,6 +45,84 @@ type NestedQuery struct { ScoreMode *childscoremode.ChildScoreMode `json:"score_mode,omitempty"` } +func (s *NestedQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "inner_hits": + if err := dec.Decode(&s.InnerHits); err != nil { + return err + } + + case "path": + if err := dec.Decode(&s.Path); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "score_mode": + if err := dec.Decode(&s.ScoreMode); err != nil { + return err + } + + } + } + return nil +} + // NewNestedQuery returns a NestedQuery. func NewNestedQuery() *NestedQuery { r := &NestedQuery{} diff --git a/typedapi/types/nestedsortvalue.go b/typedapi/types/nestedsortvalue.go old mode 100755 new mode 100644 index 65a6945de2..6b8564e00b --- a/typedapi/types/nestedsortvalue.go +++ b/typedapi/types/nestedsortvalue.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
+import (
+	"bytes"
+	"errors"
+	"io"
+
+	"strconv"
+
+	"encoding/json"
+)
+
 // NestedSortValue type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L30-L35
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L30-L35
 type NestedSortValue struct {
 	Filter      *Query `json:"filter,omitempty"`
 	MaxChildren *int   `json:"max_children,omitempty"`
@@ -30,6 +40,57 @@ type NestedSortValue struct {
 	Path   string `json:"path"`
 }
+func (s *NestedSortValue) UnmarshalJSON(data []byte) error {
+
+	dec := json.NewDecoder(bytes.NewReader(data))
+
+	for {
+		t, err := dec.Token()
+		if err != nil {
+			if errors.Is(err, io.EOF) {
+				break
+			}
+			return err
+		}
+
+		switch t {
+
+		case "filter":
+			if err := dec.Decode(&s.Filter); err != nil {
+				return err
+			}
+
+		case "max_children":
+
+			var tmp interface{}
+			dec.Decode(&tmp)
+			switch v := tmp.(type) {
+			case string:
+				value, err := strconv.Atoi(v)
+				if err != nil {
+					return err
+				}
+				s.MaxChildren = &value
+			case float64:
+				f := int(v)
+				s.MaxChildren = &f
+			}
+
+		case "nested":
+			if err := dec.Decode(&s.Nested); err != nil {
+				return err
+			}
+
+		case "path":
+			if err := dec.Decode(&s.Path); err != nil {
+				return err
+			}
+
+		}
+	}
+	return nil
+}
+
 // NewNestedSortValue returns a NestedSortValue.
 func NewNestedSortValue() *NestedSortValue {
 	r := &NestedSortValue{}
diff --git a/typedapi/types/nevercondition.go b/typedapi/types/nevercondition.go
old mode 100755
new mode 100644
index 2713c7e22b..2f310dbc55
--- a/typedapi/types/nevercondition.go
+++ b/typedapi/types/nevercondition.go
@@ -16,13 +16,13 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
 // NeverCondition type.
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L69-L69
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L69-L69
 type NeverCondition struct {
 }
diff --git a/typedapi/types/ngramtokenfilter.go b/typedapi/types/ngramtokenfilter.go
old mode 100755
new mode 100644
index 877a75f73f..1ff484b0d8
--- a/typedapi/types/ngramtokenfilter.go
+++ b/typedapi/types/ngramtokenfilter.go
@@ -16,13 +16,23 @@
 // under the License.
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7
 package types
+import (
+	"bytes"
+	"errors"
+	"io"
+
+	"strconv"
+
+	"encoding/json"
+)
+
 // NGramTokenFilter type.
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L265-L270 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L266-L271 type NGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` @@ -31,6 +41,82 @@ type NGramTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *NGramTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxGram = &value + case float64: + f := int(v) + s.MaxGram = &f + } + + case "min_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinGram = &value + case float64: + f := int(v) + s.MinGram = &f + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNGramTokenFilter returns a NGramTokenFilter. func NewNGramTokenFilter() *NGramTokenFilter { r := &NGramTokenFilter{} diff --git a/typedapi/types/ngramtokenizer.go b/typedapi/types/ngramtokenizer.go old mode 100755 new mode 100644 index 78052bf954..42044f0802 --- a/typedapi/types/ngramtokenizer.go +++ b/typedapi/types/ngramtokenizer.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tokenchar" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NGramTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L38-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L38-L44 type NGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` @@ -36,6 +44,81 @@ type NGramTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "custom_token_chars": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CustomTokenChars = &o + + case "max_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxGram = value + case float64: + f := int(v) + s.MaxGram = f + } + + case "min_gram": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinGram = value + case float64: + f := int(v) + s.MinGram = f + } + + case "token_chars": + if err := dec.Decode(&s.TokenChars); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNGramTokenizer returns a NGramTokenizer. func NewNGramTokenizer() *NGramTokenizer { r := &NGramTokenizer{} diff --git a/typedapi/types/nlpberttokenizationconfig.go b/typedapi/types/nlpberttokenizationconfig.go old mode 100755 new mode 100644 index 2070a70b4d..e1dbf1817d --- a/typedapi/types/nlpberttokenizationconfig.go +++ b/typedapi/types/nlpberttokenizationconfig.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tokenizationtruncate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NlpBertTokenizationConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L116-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L116-L143 type NlpBertTokenizationConfig struct { // DoLowerCase Should the tokenizer lower case the text DoLowerCase *bool `json:"do_lower_case,omitempty"` @@ -42,6 +50,91 @@ type NlpBertTokenizationConfig struct { WithSpecialTokens *bool `json:"with_special_tokens,omitempty"` } +func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "do_lower_case": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DoLowerCase = &value + case bool: + s.DoLowerCase = &v + } + + case "max_sequence_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxSequenceLength = &value + case float64: + f := int(v) + s.MaxSequenceLength = &f + } + + case "span": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Span = &value + case float64: + f := int(v) + s.Span = &f + } + + case "truncate": + if err := dec.Decode(&s.Truncate); err != nil { + return err + } + + case "with_special_tokens": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.WithSpecialTokens = &value + case bool: + s.WithSpecialTokens = &v + } + + } + } + return nil +} + // NewNlpBertTokenizationConfig returns a NlpBertTokenizationConfig. func NewNlpBertTokenizationConfig() *NlpBertTokenizationConfig { r := &NlpBertTokenizationConfig{} diff --git a/typedapi/types/nlprobertatokenizationconfig.go b/typedapi/types/nlprobertatokenizationconfig.go old mode 100755 new mode 100644 index 6c99fa8ca9..1893534fda --- a/typedapi/types/nlprobertatokenizationconfig.go +++ b/typedapi/types/nlprobertatokenizationconfig.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tokenizationtruncate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NlpRobertaTokenizationConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L145-L172 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L145-L172 type NlpRobertaTokenizationConfig struct { // AddPrefixSpace Should the tokenizer prefix input with a space character AddPrefixSpace *bool `json:"add_prefix_space,omitempty"` @@ -42,6 +50,91 @@ type NlpRobertaTokenizationConfig struct { WithSpecialTokens *bool `json:"with_special_tokens,omitempty"` } +func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "add_prefix_space": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AddPrefixSpace = &value + case bool: + s.AddPrefixSpace = &v + } + + case "max_sequence_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxSequenceLength = &value + case float64: + f := int(v) + s.MaxSequenceLength = &f + } + + case "span": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Span = &value + case float64: + f := int(v) + s.Span = &f + } + + case "truncate": + if err := dec.Decode(&s.Truncate); err != nil { + return err + } + + case "with_special_tokens": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.WithSpecialTokens = &value + case bool: + s.WithSpecialTokens = &v + } + + } + } + return nil +} + // NewNlpRobertaTokenizationConfig returns a NlpRobertaTokenizationConfig. func NewNlpRobertaTokenizationConfig() *NlpRobertaTokenizationConfig { r := &NlpRobertaTokenizationConfig{} diff --git a/typedapi/types/nlptokenizationupdateoptions.go b/typedapi/types/nlptokenizationupdateoptions.go old mode 100755 new mode 100644 index 785463bbb8..15f43cf9ef --- a/typedapi/types/nlptokenizationupdateoptions.go +++ b/typedapi/types/nlptokenizationupdateoptions.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tokenizationtruncate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NlpTokenizationUpdateOptions type. 
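Because the NLP tokenization configs above keep their booleans and integers as pointers, a caller can still distinguish a field that was omitted from one that was explicitly sent as false or zero, and the decoders accept the quoted forms of both. A small sketch (illustrative only; the payload is invented):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "do_lower_case" arrives as a quoted boolean, "max_sequence_length" as a
	// quoted integer, and "with_special_tokens" is not sent at all.
	payload := []byte(`{"do_lower_case":"false","max_sequence_length":"512"}`)

	var cfg types.NlpBertTokenizationConfig
	if err := json.Unmarshal(payload, &cfg); err != nil {
		panic(err)
	}

	fmt.Println(*cfg.DoLowerCase)             // false (explicitly sent)
	fmt.Println(*cfg.MaxSequenceLength)       // 512
	fmt.Println(cfg.WithSpecialTokens == nil) // true: absent, not false
}
// end of illustrative example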
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L321-L326 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L321-L326 type NlpTokenizationUpdateOptions struct { // Span Span options to apply Span *int `json:"span,omitempty"` @@ -34,6 +42,47 @@ type NlpTokenizationUpdateOptions struct { Truncate *tokenizationtruncate.TokenizationTruncate `json:"truncate,omitempty"` } +func (s *NlpTokenizationUpdateOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "span": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Span = &value + case float64: + f := int(v) + s.Span = &f + } + + case "truncate": + if err := dec.Decode(&s.Truncate); err != nil { + return err + } + + } + } + return nil +} + // NewNlpTokenizationUpdateOptions returns a NlpTokenizationUpdateOptions. func NewNlpTokenizationUpdateOptions() *NlpTokenizationUpdateOptions { r := &NlpTokenizationUpdateOptions{} diff --git a/typedapi/types/node.go b/typedapi/types/node.go old mode 100755 new mode 100644 index 4eb93e7027..a82bc403bb --- a/typedapi/types/node.go +++ b/typedapi/types/node.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Node type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/cache_stats/Response.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/cache_stats/Response.ts#L30-L32 type Node struct { SharedCache Shared `json:"shared_cache"` } diff --git a/typedapi/types/nodeallocationexplanation.go b/typedapi/types/nodeallocationexplanation.go old mode 100755 new mode 100644 index df0eaeab9e..2c38accfd2 --- a/typedapi/types/nodeallocationexplanation.go +++ b/typedapi/types/nodeallocationexplanation.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/decision" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NodeAllocationExplanation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L97-L106 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L97-L106 type NodeAllocationExplanation struct { Deciders []AllocationDecision `json:"deciders"` NodeAttributes map[string]string `json:"node_attributes"` @@ -38,6 +46,80 @@ type NodeAllocationExplanation struct { WeightRanking int `json:"weight_ranking"` } +func (s *NodeAllocationExplanation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "deciders": + if err := dec.Decode(&s.Deciders); err != nil { + return err + } + + case "node_attributes": + if s.NodeAttributes == nil { + s.NodeAttributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.NodeAttributes); err != nil { + return err + } + + case "node_decision": + if err := dec.Decode(&s.NodeDecision); err != nil { + return err + } + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "node_name": + if err := dec.Decode(&s.NodeName); err != nil { + return err + } + + case "store": + if err := dec.Decode(&s.Store); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + case "weight_ranking": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.WeightRanking = value + case float64: + f := int(v) + s.WeightRanking = f + } + + } + } + return nil +} + // NewNodeAllocationExplanation returns a NodeAllocationExplanation. func NewNodeAllocationExplanation() *NodeAllocationExplanation { r := &NodeAllocationExplanation{ diff --git a/typedapi/types/nodeattributes.go b/typedapi/types/nodeattributes.go old mode 100755 new mode 100644 index 5a3f234006..aadfe40289 --- a/typedapi/types/nodeattributes.go +++ b/typedapi/types/nodeattributes.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" + + "bytes" + "errors" + "io" + + "encoding/json" ) // NodeAttributes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Node.ts#L41-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Node.ts#L41-L57 type NodeAttributes struct { // Attributes Lists node attributes. 
Attributes map[string]string `json:"attributes"` @@ -42,6 +48,67 @@ type NodeAttributes struct { TransportAddress string `json:"transport_address"` } +func (s *NodeAttributes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "ephemeral_id": + if err := dec.Decode(&s.EphemeralId); err != nil { + return err + } + + case "external_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ExternalId = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewNodeAttributes returns a NodeAttributes. func NewNodeAttributes() *NodeAttributes { r := &NodeAttributes{ diff --git a/typedapi/types/nodeattributesrecord.go b/typedapi/types/nodeattributesrecord.go old mode 100755 new mode 100644 index 7d53b4c86d..1f442f9e0a --- a/typedapi/types/nodeattributesrecord.go +++ b/typedapi/types/nodeattributesrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeAttributesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/nodeattrs/types.ts#L20-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/nodeattrs/types.ts#L20-L55 type NodeAttributesRecord struct { // Attr attribute description Attr *string `json:"attr,omitempty"` diff --git a/typedapi/types/nodebufferpool.go b/typedapi/types/nodebufferpool.go old mode 100755 new mode 100644 index e4c33b1841..43127ccef4 --- a/typedapi/types/nodebufferpool.go +++ b/typedapi/types/nodebufferpool.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeBufferPool type. 
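Fields of NodeAttributes that are backed by dedicated types (Id, Name, the node-role enum slice, TransportAddress) are handed straight to dec.Decode, so their own unmarshalling rules compose with the hand-written switch; the string-or-number special casing only appears where the generated code reaches for strconv. A short sketch with an invented payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// id, name, roles and transport_address are all delegated to dec.Decode,
	// so the alias and enum types in typedapi handle them directly.
	payload := []byte(`{
		"id": "n1",
		"name": "node-1",
		"transport_address": "127.0.0.1:9300",
		"roles": ["master", "data"],
		"attributes": {"rack": "r1"}
	}`)

	var na types.NodeAttributes
	if err := json.Unmarshal(payload, &na); err != nil {
		panic(err)
	}
	fmt.Println(na.Name, na.TransportAddress, len(na.Roles), na.Attributes["rack"])
	// node-1 127.0.0.1:9300 2 r1
}
// end of illustrative example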
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L316-L322 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L316-L322 type NodeBufferPool struct { Count *int64 `json:"count,omitempty"` TotalCapacity *string `json:"total_capacity,omitempty"` @@ -31,6 +41,87 @@ type NodeBufferPool struct { UsedInBytes *int64 `json:"used_in_bytes,omitempty"` } +func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "total_capacity": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TotalCapacity = &o + + case "total_capacity_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalCapacityInBytes = &value + case float64: + f := int64(v) + s.TotalCapacityInBytes = &f + } + + case "used": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Used = &o + + case "used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UsedInBytes = &value + case float64: + f := int64(v) + s.UsedInBytes = &f + } + + } + } + return nil +} + // NewNodeBufferPool returns a NodeBufferPool. func NewNodeBufferPool() *NodeBufferPool { r := &NodeBufferPool{} diff --git a/typedapi/types/nodediskusage.go b/typedapi/types/nodediskusage.go old mode 100755 new mode 100644 index 1dcf2ef534..da7183f2ae --- a/typedapi/types/nodediskusage.go +++ b/typedapi/types/nodediskusage.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeDiskUsage type. 
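NodeBufferPool's count and *_in_bytes fields are int64, and the two branches above are not equivalent for very large values: a bare JSON number reaches the decoder as a float64, which is exact only up to 2^53, while a quoted value goes through strconv.ParseInt and keeps full 64-bit precision. A self-contained illustration of that difference (plain Go, independent of the generated types):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	// 2^53 + 1 cannot be represented exactly as a float64, which is how
	// encoding/json hands back bare JSON numbers inside an interface{}.
	var asNumber map[string]interface{}
	_ = json.Unmarshal([]byte(`{"used_in_bytes":9007199254740993}`), &asNumber)
	fromFloat := int64(asNumber["used_in_bytes"].(float64))

	// The quoted form takes the strconv.ParseInt branch and round-trips exactly.
	fromString, _ := strconv.ParseInt("9007199254740993", 10, 64)

	fmt.Println(fromFloat)  // 9007199254740992 (rounded through float64)
	fmt.Println(fromString) // 9007199254740993 (exact)
}
// end of illustrative example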
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L56-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L56-L60 type NodeDiskUsage struct { LeastAvailable DiskUsage `json:"least_available"` MostAvailable DiskUsage `json:"most_available"` NodeName string `json:"node_name"` } +func (s *NodeDiskUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "least_available": + if err := dec.Decode(&s.LeastAvailable); err != nil { + return err + } + + case "most_available": + if err := dec.Decode(&s.MostAvailable); err != nil { + return err + } + + case "node_name": + if err := dec.Decode(&s.NodeName); err != nil { + return err + } + + } + } + return nil +} + // NewNodeDiskUsage returns a NodeDiskUsage. func NewNodeDiskUsage() *NodeDiskUsage { r := &NodeDiskUsage{} diff --git a/typedapi/types/nodeids.go b/typedapi/types/nodeids.go old mode 100755 new mode 100644 index 691a88a02e..8c991cf8c3 --- a/typedapi/types/nodeids.go +++ b/typedapi/types/nodeids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeIds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L58-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L58-L58 type NodeIds []string diff --git a/typedapi/types/nodeinfo.go b/typedapi/types/nodeinfo.go old mode 100755 new mode 100644 index 931f6d7f3c..c3ef1b25e0 --- a/typedapi/types/nodeinfo.go +++ b/typedapi/types/nodeinfo.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NodeInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L30-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L30-L66 type NodeInfo struct { Aggregations map[string]NodeInfoAggregation `json:"aggregations,omitempty"` Attributes map[string]string `json:"attributes"` @@ -64,6 +72,174 @@ type NodeInfo struct { Version string `json:"version"` } +func (s *NodeInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]NodeInfoAggregation, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return err + } + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "build_flavor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BuildFlavor = o + + case "build_hash": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BuildHash = o + + case "build_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BuildType = o + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "http": + if err := dec.Decode(&s.Http); err != nil { + return err + } + + case "ingest": + if err := dec.Decode(&s.Ingest); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "jvm": + if err := dec.Decode(&s.Jvm); err != nil { + return err + } + + case "modules": + if err := dec.Decode(&s.Modules); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "network": + if err := dec.Decode(&s.Network); err != nil { + return err + } + + case "os": + if err := dec.Decode(&s.Os); err != nil { + return err + } + + case "plugins": + if err := dec.Decode(&s.Plugins); err != nil { + return err + } + + case "process": + if err := dec.Decode(&s.Process); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + case "thread_pool": + if s.ThreadPool == nil { + s.ThreadPool = make(map[string]NodeThreadPoolInfo, 0) + } + if err := dec.Decode(&s.ThreadPool); err != nil { + return err + } + + case "total_indexing_buffer": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalIndexingBuffer = &value + case float64: + f := int64(v) + s.TotalIndexingBuffer = &f + } + + case "total_indexing_buffer_in_bytes": + if err := dec.Decode(&s.TotalIndexingBufferInBytes); err != nil { + return err + } + + case "transport": + if err := dec.Decode(&s.Transport); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNodeInfo returns a 
NodeInfo. func NewNodeInfo() *NodeInfo { r := &NodeInfo{ diff --git a/typedapi/types/nodeinfoaction.go b/typedapi/types/nodeinfoaction.go old mode 100755 new mode 100644 index 2a1e0db7a8..09431460ff --- a/typedapi/types/nodeinfoaction.go +++ b/typedapi/types/nodeinfoaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L173-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L173-L175 type NodeInfoAction struct { DestructiveRequiresName string `json:"destructive_requires_name"` } diff --git a/typedapi/types/nodeinfoaggregation.go b/typedapi/types/nodeinfoaggregation.go old mode 100755 new mode 100644 index ae0106456f..a5abd88e46 --- a/typedapi/types/nodeinfoaggregation.go +++ b/typedapi/types/nodeinfoaggregation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L224-L226 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L224-L226 type NodeInfoAggregation struct { Types []string `json:"types"` } diff --git a/typedapi/types/nodeinfobootstrap.go b/typedapi/types/nodeinfobootstrap.go old mode 100755 new mode 100644 index e43a2f9f5a..79d3d7929b --- a/typedapi/types/nodeinfobootstrap.go +++ b/typedapi/types/nodeinfobootstrap.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoBootstrap type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L193-L195 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L193-L195 type NodeInfoBootstrap struct { MemoryLock string `json:"memory_lock"` } diff --git a/typedapi/types/nodeinfoclient.go b/typedapi/types/nodeinfoclient.go old mode 100755 new mode 100644 index 9c6c33e0c4..82151dec5a --- a/typedapi/types/nodeinfoclient.go +++ b/typedapi/types/nodeinfoclient.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoClient type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L177-L179 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L177-L179 type NodeInfoClient struct { Type string `json:"type"` } diff --git a/typedapi/types/nodeinfodiscover.go b/typedapi/types/nodeinfodiscover.go old mode 100755 new mode 100644 index 4fee737c71..3e2b2ed010 --- a/typedapi/types/nodeinfodiscover.go +++ b/typedapi/types/nodeinfodiscover.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoDiscover type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L169-L171 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L169-L171 type NodeInfoDiscover struct { SeedHosts string `json:"seed_hosts"` } diff --git a/typedapi/types/nodeinfohttp.go b/typedapi/types/nodeinfohttp.go old mode 100755 new mode 100644 index 7767e59bd8..157d220a2b --- a/typedapi/types/nodeinfohttp.go +++ b/typedapi/types/nodeinfohttp.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeInfoHttp type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L295-L300 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L295-L300 type NodeInfoHttp struct { BoundAddress []string `json:"bound_address"` MaxContentLength ByteSize `json:"max_content_length,omitempty"` @@ -30,6 +40,59 @@ type NodeInfoHttp struct { PublishAddress string `json:"publish_address"` } +func (s *NodeInfoHttp) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bound_address": + if err := dec.Decode(&s.BoundAddress); err != nil { + return err + } + + case "max_content_length": + if err := dec.Decode(&s.MaxContentLength); err != nil { + return err + } + + case "max_content_length_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxContentLengthInBytes = value + case float64: + f := int64(v) + s.MaxContentLengthInBytes = f + } + + case "publish_address": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PublishAddress = o + + } + } + return nil +} + // NewNodeInfoHttp returns a NodeInfoHttp. 
func NewNodeInfoHttp() *NodeInfoHttp { r := &NodeInfoHttp{} diff --git a/typedapi/types/nodeinfoingest.go b/typedapi/types/nodeinfoingest.go old mode 100755 new mode 100644 index 96b1dce5ac..8561800a1f --- a/typedapi/types/nodeinfoingest.go +++ b/typedapi/types/nodeinfoingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L216-L218 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L216-L218 type NodeInfoIngest struct { Processors []NodeInfoIngestProcessor `json:"processors"` } diff --git a/typedapi/types/nodeinfoingestdownloader.go b/typedapi/types/nodeinfoingestdownloader.go old mode 100755 new mode 100644 index b5bf4dd1ef..bdf5b81199 --- a/typedapi/types/nodeinfoingestdownloader.go +++ b/typedapi/types/nodeinfoingestdownloader.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoIngestDownloader type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L127-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L127-L129 type NodeInfoIngestDownloader struct { Enabled string `json:"enabled"` } diff --git a/typedapi/types/nodeinfoingestinfo.go b/typedapi/types/nodeinfoingestinfo.go old mode 100755 new mode 100644 index 2bb60e9f13..4cda9aa684 --- a/typedapi/types/nodeinfoingestinfo.go +++ b/typedapi/types/nodeinfoingestinfo.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoIngestInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L123-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L123-L125 type NodeInfoIngestInfo struct { Downloader NodeInfoIngestDownloader `json:"downloader"` } diff --git a/typedapi/types/nodeinfoingestprocessor.go b/typedapi/types/nodeinfoingestprocessor.go old mode 100755 new mode 100644 index 58b7be7944..a9015c0aef --- a/typedapi/types/nodeinfoingestprocessor.go +++ b/typedapi/types/nodeinfoingestprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoIngestProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L220-L222 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L220-L222 type NodeInfoIngestProcessor struct { Type string `json:"type"` } diff --git a/typedapi/types/nodeinfojvmmemory.go b/typedapi/types/nodeinfojvmmemory.go old mode 100755 new mode 100644 index 5896a4f159..bf223b3384 --- a/typedapi/types/nodeinfojvmmemory.go +++ b/typedapi/types/nodeinfojvmmemory.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeInfoJvmMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L302-L313 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L302-L313 type NodeInfoJvmMemory struct { DirectMax ByteSize `json:"direct_max,omitempty"` DirectMaxInBytes int64 `json:"direct_max_in_bytes"` @@ -36,6 +46,126 @@ type NodeInfoJvmMemory struct { NonHeapMaxInBytes int64 `json:"non_heap_max_in_bytes"` } +func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "direct_max": + if err := dec.Decode(&s.DirectMax); err != nil { + return err + } + + case "direct_max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DirectMaxInBytes = value + case float64: + f := int64(v) + s.DirectMaxInBytes = f + } + + case "heap_init": + if err := dec.Decode(&s.HeapInit); err != nil { + return err + } + + case "heap_init_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapInitInBytes = value + case float64: + f := int64(v) + s.HeapInitInBytes = f + } + + case "heap_max": + if err := dec.Decode(&s.HeapMax); err != nil { + return err + } + + case "heap_max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HeapMaxInBytes = value + case float64: + f := int64(v) + s.HeapMaxInBytes = f + } + + case "non_heap_init": + if err := dec.Decode(&s.NonHeapInit); err != nil { + return err + } + + case "non_heap_init_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NonHeapInitInBytes = value + case float64: + f := int64(v) + 
s.NonHeapInitInBytes = f + } + + case "non_heap_max": + if err := dec.Decode(&s.NonHeapMax); err != nil { + return err + } + + case "non_heap_max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NonHeapMaxInBytes = value + case float64: + f := int64(v) + s.NonHeapMaxInBytes = f + } + + } + } + return nil +} + // NewNodeInfoJvmMemory returns a NodeInfoJvmMemory. func NewNodeInfoJvmMemory() *NodeInfoJvmMemory { r := &NodeInfoJvmMemory{} diff --git a/typedapi/types/nodeinfomemory.go b/typedapi/types/nodeinfomemory.go old mode 100755 new mode 100644 index d7f28104e8..fad47ab4a9 --- a/typedapi/types/nodeinfomemory.go +++ b/typedapi/types/nodeinfomemory.go @@ -16,18 +16,71 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeInfoMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L315-L318 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L315-L318 type NodeInfoMemory struct { Total string `json:"total"` TotalInBytes int64 `json:"total_in_bytes"` } +func (s *NodeInfoMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Total = o + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = value + case float64: + f := int64(v) + s.TotalInBytes = f + } + + } + } + return nil +} + // NewNodeInfoMemory returns a NodeInfoMemory. func NewNodeInfoMemory() *NodeInfoMemory { r := &NodeInfoMemory{} diff --git a/typedapi/types/nodeinfonetwork.go b/typedapi/types/nodeinfonetwork.go old mode 100755 new mode 100644 index 47d6f62712..9f1ded12bb --- a/typedapi/types/nodeinfonetwork.go +++ b/typedapi/types/nodeinfonetwork.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeInfoNetwork type. 
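NodeInfo's decoder delegates nested sections such as "http", "jvm" and "os" to dec.Decode, which in turn invokes the generated decoders of those types (NodeInfoHttp, NodeInfoJvmMemory, etc.), so the lenient handling applies at every level of the nodes-info tree. A usage sketch with an invented, heavily trimmed payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The "http" object is consumed by NodeInfoHttp's own decoder, which accepts
	// max_content_length_in_bytes as either a number or a quoted string.
	payload := []byte(`{
		"name": "node-1",
		"http": {
			"bound_address": ["127.0.0.1:9200"],
			"publish_address": "127.0.0.1:9200",
			"max_content_length_in_bytes": "104857600"
		}
	}`)

	var ni types.NodeInfo
	if err := json.Unmarshal(payload, &ni); err != nil {
		panic(err)
	}
	fmt.Println(ni.Http.MaxContentLengthInBytes) // 104857600
}
// end of illustrative example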
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L320-L323 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L320-L323 type NodeInfoNetwork struct { PrimaryInterface NodeInfoNetworkInterface `json:"primary_interface"` RefreshInterval int `json:"refresh_interval"` } +func (s *NodeInfoNetwork) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "primary_interface": + if err := dec.Decode(&s.PrimaryInterface); err != nil { + return err + } + + case "refresh_interval": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RefreshInterval = value + case float64: + f := int(v) + s.RefreshInterval = f + } + + } + } + return nil +} + // NewNodeInfoNetwork returns a NodeInfoNetwork. func NewNodeInfoNetwork() *NodeInfoNetwork { r := &NodeInfoNetwork{} diff --git a/typedapi/types/nodeinfonetworkinterface.go b/typedapi/types/nodeinfonetworkinterface.go old mode 100755 new mode 100644 index 5632ea412c..4a619b1eae --- a/typedapi/types/nodeinfonetworkinterface.go +++ b/typedapi/types/nodeinfonetworkinterface.go @@ -16,19 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeInfoNetworkInterface type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L325-L329 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L325-L329 type NodeInfoNetworkInterface struct { Address string `json:"address"` MacAddress string `json:"mac_address"` Name string `json:"name"` } +func (s *NodeInfoNetworkInterface) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "address": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Address = o + + case "mac_address": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MacAddress = o + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewNodeInfoNetworkInterface returns a NodeInfoNetworkInterface. func NewNodeInfoNetworkInterface() *NodeInfoNetworkInterface { r := &NodeInfoNetworkInterface{} diff --git a/typedapi/types/nodeinfooscpu.go b/typedapi/types/nodeinfooscpu.go old mode 100755 new mode 100644 index ac81c93b3d..f9578723f4 --- a/typedapi/types/nodeinfooscpu.go +++ b/typedapi/types/nodeinfooscpu.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeInfoOSCPU type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L331-L340 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L331-L340 type NodeInfoOSCPU struct { CacheSize string `json:"cache_size"` CacheSizeInBytes int `json:"cache_size_in_bytes"` @@ -34,6 +44,130 @@ type NodeInfoOSCPU struct { Vendor string `json:"vendor"` } +func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CacheSize = o + + case "cache_size_in_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CacheSizeInBytes = value + case float64: + f := int(v) + s.CacheSizeInBytes = f + } + + case "cores_per_socket": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CoresPerSocket = value + case float64: + f := int(v) + s.CoresPerSocket = f + } + + case "mhz": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Mhz = value + case float64: + f := int(v) + s.Mhz = f + } + + case "model": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Model = o + + case "total_cores": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalCores = value + case float64: + f := int(v) + s.TotalCores = f + } + + case "total_sockets": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalSockets = value + case float64: + f := int(v) + s.TotalSockets = f + } + + case "vendor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Vendor = o + + } + } + return nil +} + // NewNodeInfoOSCPU returns a NodeInfoOSCPU. func NewNodeInfoOSCPU() *NodeInfoOSCPU { r := &NodeInfoOSCPU{} diff --git a/typedapi/types/nodeinfopath.go b/typedapi/types/nodeinfopath.go old mode 100755 new mode 100644 index 49fd62d8f7..a0f83d6dfc --- a/typedapi/types/nodeinfopath.go +++ b/typedapi/types/nodeinfopath.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoPath type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L154-L159 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L154-L159 type NodeInfoPath struct { Data []string `json:"data,omitempty"` Home string `json:"home"` diff --git a/typedapi/types/nodeinforepositories.go b/typedapi/types/nodeinforepositories.go old mode 100755 new mode 100644 index 5b14d58ea8..3cb1b14456 --- a/typedapi/types/nodeinforepositories.go +++ b/typedapi/types/nodeinforepositories.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoRepositories type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L161-L163 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L161-L163 type NodeInfoRepositories struct { Url NodeInfoRepositoriesUrl `json:"url"` } diff --git a/typedapi/types/nodeinforepositoriesurl.go b/typedapi/types/nodeinforepositoriesurl.go old mode 100755 new mode 100644 index 8a8e72f356..14f18478f2 --- a/typedapi/types/nodeinforepositoriesurl.go +++ b/typedapi/types/nodeinforepositoriesurl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoRepositoriesUrl type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L165-L167 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L165-L167 type NodeInfoRepositoriesUrl struct { AllowedUrls string `json:"allowed_urls"` } diff --git a/typedapi/types/nodeinfoscript.go b/typedapi/types/nodeinfoscript.go old mode 100755 new mode 100644 index 1b44dc8b10..b7e6c68420 --- a/typedapi/types/nodeinfoscript.go +++ b/typedapi/types/nodeinfoscript.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoScript type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L273-L276 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L273-L276 type NodeInfoScript struct { AllowedTypes string `json:"allowed_types"` DisableMaxCompilationsRate string `json:"disable_max_compilations_rate"` diff --git a/typedapi/types/nodeinfosearch.go b/typedapi/types/nodeinfosearch.go old mode 100755 new mode 100644 index 7818aea6fe..e41a7a3b07 --- a/typedapi/types/nodeinfosearch.go +++ b/typedapi/types/nodeinfosearch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L278-L280 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L278-L280 type NodeInfoSearch struct { Remote NodeInfoSearchRemote `json:"remote"` } diff --git a/typedapi/types/nodeinfosearchremote.go b/typedapi/types/nodeinfosearchremote.go old mode 100755 new mode 100644 index 1380c30a7f..9ff645ab96 --- a/typedapi/types/nodeinfosearchremote.go +++ b/typedapi/types/nodeinfosearchremote.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoSearchRemote type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L282-L284 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L282-L284 type NodeInfoSearchRemote struct { Connect string `json:"connect"` } diff --git a/typedapi/types/nodeinfosettings.go b/typedapi/types/nodeinfosettings.go old mode 100755 new mode 100644 index 0ea937e4f3..ef6cdb04b4 --- a/typedapi/types/nodeinfosettings.go +++ b/typedapi/types/nodeinfosettings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L68-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L68-L84 type NodeInfoSettings struct { Action *NodeInfoAction `json:"action,omitempty"` Bootstrap *NodeInfoBootstrap `json:"bootstrap,omitempty"` diff --git a/typedapi/types/nodeinfosettingscluster.go b/typedapi/types/nodeinfosettingscluster.go old mode 100755 new mode 100644 index 6f55a47f2e..7301fcfcc9 --- a/typedapi/types/nodeinfosettingscluster.go +++ b/typedapi/types/nodeinfosettingscluster.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeInfoSettingsCluster type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L131-L138 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L131-L138 type NodeInfoSettingsCluster struct { DeprecationIndexing *DeprecationIndexing `json:"deprecation_indexing,omitempty"` Election NodeInfoSettingsClusterElection `json:"election"` @@ -31,6 +39,54 @@ type NodeInfoSettingsCluster struct { Routing *IndexRouting `json:"routing,omitempty"` } +func (s *NodeInfoSettingsCluster) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "deprecation_indexing": + if err := dec.Decode(&s.DeprecationIndexing); err != nil { + return err + } + + case "election": + if err := dec.Decode(&s.Election); err != nil { + return err + } + + case "initial_master_nodes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.InitialMasterNodes = &o + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + } + } + return nil +} + // NewNodeInfoSettingsCluster returns a NodeInfoSettingsCluster. func NewNodeInfoSettingsCluster() *NodeInfoSettingsCluster { r := &NodeInfoSettingsCluster{} diff --git a/typedapi/types/nodeinfosettingsclusterelection.go b/typedapi/types/nodeinfosettingsclusterelection.go old mode 100755 new mode 100644 index 094230dc14..9c724b938f --- a/typedapi/types/nodeinfosettingsclusterelection.go +++ b/typedapi/types/nodeinfosettingsclusterelection.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeInfoSettingsClusterElection type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L144-L146 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L144-L146 type NodeInfoSettingsClusterElection struct { Strategy string `json:"strategy"` } +func (s *NodeInfoSettingsClusterElection) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "strategy": + if err := dec.Decode(&s.Strategy); err != nil { + return err + } + + } + } + return nil +} + // NewNodeInfoSettingsClusterElection returns a NodeInfoSettingsClusterElection. func NewNodeInfoSettingsClusterElection() *NodeInfoSettingsClusterElection { r := &NodeInfoSettingsClusterElection{} diff --git a/typedapi/types/nodeinfosettingshttp.go b/typedapi/types/nodeinfosettingshttp.go old mode 100755 new mode 100644 index 0aa38c45b5..0b14a0c3ed --- a/typedapi/types/nodeinfosettingshttp.go +++ b/typedapi/types/nodeinfosettingshttp.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // NodeInfoSettingsHttp type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L181-L186 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L181-L186 type NodeInfoSettingsHttp struct { Compression string `json:"compression,omitempty"` Port string `json:"port,omitempty"` @@ -30,6 +37,55 @@ type NodeInfoSettingsHttp struct { TypeDefault *string `json:"type.default,omitempty"` } +func (s *NodeInfoSettingsHttp) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compression": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Compression = o + + case "port": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Port = o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "type.default": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TypeDefault = &o + + } + } + return nil +} + // NewNodeInfoSettingsHttp returns a NodeInfoSettingsHttp. func NewNodeInfoSettingsHttp() *NodeInfoSettingsHttp { r := &NodeInfoSettingsHttp{} diff --git a/typedapi/types/nodeinfosettingshttptype.go b/typedapi/types/nodeinfosettingshttptype.go old mode 100755 new mode 100644 index 637a1a0782..81e0b896fb --- a/typedapi/types/nodeinfosettingshttptype.go +++ b/typedapi/types/nodeinfosettingshttptype.go @@ -16,17 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeInfoSettingsHttpType type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L188-L191 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L188-L191 type NodeInfoSettingsHttpType struct { Default string `json:"default"` } +func (s *NodeInfoSettingsHttpType) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Default) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "default": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Default = o + + } + } + return nil +} + // NewNodeInfoSettingsHttpType returns a NodeInfoSettingsHttpType. func NewNodeInfoSettingsHttpType() *NodeInfoSettingsHttpType { r := &NodeInfoSettingsHttpType{} diff --git a/typedapi/types/nodeinfosettingsingest.go b/typedapi/types/nodeinfosettingsingest.go old mode 100755 new mode 100644 index 0e1c738648..907011bab3 --- a/typedapi/types/nodeinfosettingsingest.go +++ b/typedapi/types/nodeinfosettingsingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoSettingsIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L86-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L86-L121 type NodeInfoSettingsIngest struct { Append *NodeInfoIngestInfo `json:"append,omitempty"` Attachment *NodeInfoIngestInfo `json:"attachment,omitempty"` diff --git a/typedapi/types/nodeinfosettingsnetwork.go b/typedapi/types/nodeinfosettingsnetwork.go old mode 100755 new mode 100644 index a047ecae1f..57c97f495b --- a/typedapi/types/nodeinfosettingsnetwork.go +++ b/typedapi/types/nodeinfosettingsnetwork.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeInfoSettingsNetwork type. 
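// A minimal sketch (not part of the generated diff) of the string-or-object
// decoding used above by NodeInfoSettingsHttpType.UnmarshalJSON, which accepts
// either a bare JSON string or an object with a "default" key. The type name
// flexibleType is hypothetical; the alias trick stands in for the generated
// token loop but implements the same idea.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type flexibleType struct {
	Default string `json:"default"`
}

func (f *flexibleType) UnmarshalJSON(data []byte) error {
	// Payloads that do not start with '{' are treated as the bare string form.
	if !bytes.HasPrefix(bytes.TrimSpace(data), []byte(`{`)) {
		return json.Unmarshal(data, &f.Default)
	}
	// Decode the object form via an alias type to avoid recursing into this method.
	type plain flexibleType
	var p plain
	if err := json.Unmarshal(data, &p); err != nil {
		return err
	}
	*f = flexibleType(p)
	return nil
}

func main() {
	var a, b flexibleType
	_ = json.Unmarshal([]byte(`"netty4"`), &a)             // bare string form
	_ = json.Unmarshal([]byte(`{"default":"netty4"}`), &b) // object form
	fmt.Println(a.Default, b.Default)                      // netty4 netty4
}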
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L212-L214 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L212-L214 type NodeInfoSettingsNetwork struct { Host string `json:"host"` } +func (s *NodeInfoSettingsNetwork) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + } + } + return nil +} + // NewNodeInfoSettingsNetwork returns a NodeInfoSettingsNetwork. func NewNodeInfoSettingsNetwork() *NodeInfoSettingsNetwork { r := &NodeInfoSettingsNetwork{} diff --git a/typedapi/types/nodeinfosettingsnode.go b/typedapi/types/nodeinfosettingsnode.go old mode 100755 new mode 100644 index 45854b0f03..5ed633e1a4 --- a/typedapi/types/nodeinfosettingsnode.go +++ b/typedapi/types/nodeinfosettingsnode.go @@ -16,23 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // NodeInfoSettingsNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L148-L152 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L148-L152 type NodeInfoSettingsNode struct { Attr map[string]json.RawMessage `json:"attr"` MaxLocalStorageNodes *string `json:"max_local_storage_nodes,omitempty"` Name string `json:"name"` } +func (s *NodeInfoSettingsNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attr": + if s.Attr == nil { + s.Attr = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Attr); err != nil { + return err + } + + case "max_local_storage_nodes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxLocalStorageNodes = &o + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewNodeInfoSettingsNode returns a NodeInfoSettingsNode. func NewNodeInfoSettingsNode() *NodeInfoSettingsNode { r := &NodeInfoSettingsNode{ diff --git a/typedapi/types/nodeinfosettingstransport.go b/typedapi/types/nodeinfosettingstransport.go old mode 100755 new mode 100644 index d2bd4082e5..ab6182730b --- a/typedapi/types/nodeinfosettingstransport.go +++ b/typedapi/types/nodeinfosettingstransport.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoSettingsTransport type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L197-L201 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L197-L201 type NodeInfoSettingsTransport struct { Features *NodeInfoSettingsTransportFeatures `json:"features,omitempty"` Type NodeInfoSettingsTransportType `json:"type"` diff --git a/typedapi/types/nodeinfosettingstransportfeatures.go b/typedapi/types/nodeinfosettingstransportfeatures.go old mode 100755 new mode 100644 index bbff372494..4af6036aff --- a/typedapi/types/nodeinfosettingstransportfeatures.go +++ b/typedapi/types/nodeinfosettingstransportfeatures.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoSettingsTransportFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L208-L210 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L208-L210 type NodeInfoSettingsTransportFeatures struct { XPack string `json:"x-pack"` } diff --git a/typedapi/types/nodeinfosettingstransporttype.go b/typedapi/types/nodeinfosettingstransporttype.go old mode 100755 new mode 100644 index aafe6fb20e..c89b95615a --- a/typedapi/types/nodeinfosettingstransporttype.go +++ b/typedapi/types/nodeinfosettingstransporttype.go @@ -16,17 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeInfoSettingsTransportType type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L203-L206 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L203-L206 type NodeInfoSettingsTransportType struct { Default string `json:"default"` } +func (s *NodeInfoSettingsTransportType) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Default) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "default": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Default = o + + } + } + return nil +} + // NewNodeInfoSettingsTransportType returns a NodeInfoSettingsTransportType. 
func NewNodeInfoSettingsTransportType() *NodeInfoSettingsTransportType { r := &NodeInfoSettingsTransportType{} diff --git a/typedapi/types/nodeinfotransport.go b/typedapi/types/nodeinfotransport.go old mode 100755 new mode 100644 index 3ada8eb814..75723116c6 --- a/typedapi/types/nodeinfotransport.go +++ b/typedapi/types/nodeinfotransport.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoTransport type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L342-L346 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L342-L346 type NodeInfoTransport struct { BoundAddress []string `json:"bound_address"` Profiles map[string]string `json:"profiles"` diff --git a/typedapi/types/nodeinfoxpack.go b/typedapi/types/nodeinfoxpack.go old mode 100755 new mode 100644 index 8a1472e863..d9bffdb07b --- a/typedapi/types/nodeinfoxpack.go +++ b/typedapi/types/nodeinfoxpack.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // NodeInfoXpack type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L228-L232 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L228-L232 type NodeInfoXpack struct { License *NodeInfoXpackLicense `json:"license,omitempty"` Notification map[string]json.RawMessage `json:"notification,omitempty"` diff --git a/typedapi/types/nodeinfoxpacklicense.go b/typedapi/types/nodeinfoxpacklicense.go old mode 100755 new mode 100644 index e5a9073637..b2f833d754 --- a/typedapi/types/nodeinfoxpacklicense.go +++ b/typedapi/types/nodeinfoxpacklicense.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackLicense type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L265-L267 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L265-L267 type NodeInfoXpackLicense struct { SelfGenerated NodeInfoXpackLicenseType `json:"self_generated"` } diff --git a/typedapi/types/nodeinfoxpacklicensetype.go b/typedapi/types/nodeinfoxpacklicensetype.go old mode 100755 new mode 100644 index db23369030..38287e7713 --- a/typedapi/types/nodeinfoxpacklicensetype.go +++ b/typedapi/types/nodeinfoxpacklicensetype.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackLicenseType type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L269-L271 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L269-L271 type NodeInfoXpackLicenseType struct { Type string `json:"type"` } diff --git a/typedapi/types/nodeinfoxpacksecurity.go b/typedapi/types/nodeinfoxpacksecurity.go old mode 100755 new mode 100644 index 24bc770c01..fab68560cb --- a/typedapi/types/nodeinfoxpacksecurity.go +++ b/typedapi/types/nodeinfoxpacksecurity.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackSecurity type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L234-L239 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L234-L239 type NodeInfoXpackSecurity struct { Authc *NodeInfoXpackSecurityAuthc `json:"authc,omitempty"` Enabled string `json:"enabled"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthc.go b/typedapi/types/nodeinfoxpacksecurityauthc.go old mode 100755 new mode 100644 index 5bb0d8cfe0..59632a80a0 --- a/typedapi/types/nodeinfoxpacksecurityauthc.go +++ b/typedapi/types/nodeinfoxpacksecurityauthc.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackSecurityAuthc type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L245-L248 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L245-L248 type NodeInfoXpackSecurityAuthc struct { Realms NodeInfoXpackSecurityAuthcRealms `json:"realms"` Token NodeInfoXpackSecurityAuthcToken `json:"token"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthcrealms.go b/typedapi/types/nodeinfoxpacksecurityauthcrealms.go old mode 100755 new mode 100644 index bd0a5103c3..8431bb6edf --- a/typedapi/types/nodeinfoxpacksecurityauthcrealms.go +++ b/typedapi/types/nodeinfoxpacksecurityauthcrealms.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackSecurityAuthcRealms type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L250-L254 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L250-L254 type NodeInfoXpackSecurityAuthcRealms struct { File map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"file,omitempty"` Native map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"native,omitempty"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go b/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go old mode 100755 new mode 100644 index 625e193aff..b48067ae83 --- a/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go +++ b/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackSecurityAuthcRealmsStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L260-L263 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L260-L263 type NodeInfoXpackSecurityAuthcRealmsStatus struct { Enabled *string `json:"enabled,omitempty"` Order string `json:"order"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthctoken.go b/typedapi/types/nodeinfoxpacksecurityauthctoken.go old mode 100755 new mode 100644 index dd8da36294..0791800f32 --- a/typedapi/types/nodeinfoxpacksecurityauthctoken.go +++ b/typedapi/types/nodeinfoxpacksecurityauthctoken.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackSecurityAuthcToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L256-L258 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L256-L258 type NodeInfoXpackSecurityAuthcToken struct { Enabled string `json:"enabled"` } diff --git a/typedapi/types/nodeinfoxpacksecurityssl.go b/typedapi/types/nodeinfoxpacksecurityssl.go old mode 100755 new mode 100644 index 0cee1307de..29633e1e4a --- a/typedapi/types/nodeinfoxpacksecurityssl.go +++ b/typedapi/types/nodeinfoxpacksecurityssl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodeInfoXpackSecuritySsl type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L241-L243 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L241-L243 type NodeInfoXpackSecuritySsl struct { Ssl map[string]string `json:"ssl"` } diff --git a/typedapi/types/nodejvminfo.go b/typedapi/types/nodejvminfo.go old mode 100755 new mode 100644 index 922b27b13d..f38c551e99 --- a/typedapi/types/nodejvminfo.go +++ b/typedapi/types/nodejvminfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeJvmInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L348-L362 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L348-L362 type NodeJvmInfo struct { GcCollectors []string `json:"gc_collectors"` InputArguments []string `json:"input_arguments"` @@ -38,6 +48,112 @@ type NodeJvmInfo struct { VmVersion string `json:"vm_version"` } +func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "gc_collectors": + if err := dec.Decode(&s.GcCollectors); err != nil { + return err + } + + case "input_arguments": + if err := dec.Decode(&s.InputArguments); err != nil { + return err + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "memory_pools": + if err := dec.Decode(&s.MemoryPools); err != nil { + return err + } + + case "pid": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Pid = value + case float64: + f := int(v) + s.Pid = f + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "using_bundled_jdk", "bundled_jdk": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UsingBundledJdk = value + case bool: + s.UsingBundledJdk = v + } + + case "using_compressed_ordinary_object_pointers": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UsingCompressedOrdinaryObjectPointers = o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "vm_name": + if err := dec.Decode(&s.VmName); err != nil { + return err + } + + case "vm_vendor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.VmVendor = o + + case "vm_version": + if err := dec.Decode(&s.VmVersion); err != nil { + return err + } + + } + } + return nil +} + // NewNodeJvmInfo returns a NodeJvmInfo. 
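// A minimal sketch (not part of the generated diff) of the lenient numeric
// decoding used above for fields such as NodeJvmInfo.Pid, which may arrive
// either as a JSON number or as a quoted string. The type name lenientInt is
// hypothetical and only illustrates the technique.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type lenientInt struct {
	Pid int `json:"pid"`
}

func (l *lenientInt) UnmarshalJSON(data []byte) error {
	var raw map[string]interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["pid"].(type) {
	case float64: // plain JSON number
		l.Pid = int(v)
	case string: // quoted number, e.g. "12345"
		n, err := strconv.Atoi(v)
		if err != nil {
			return err
		}
		l.Pid = n
	}
	return nil
}

func main() {
	var a, b lenientInt
	_ = json.Unmarshal([]byte(`{"pid": 12345}`), &a)
	_ = json.Unmarshal([]byte(`{"pid": "12345"}`), &b)
	fmt.Println(a.Pid, b.Pid) // 12345 12345
}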
func NewNodeJvmInfo() *NodeJvmInfo { r := &NodeJvmInfo{} diff --git a/typedapi/types/nodeoperatingsysteminfo.go b/typedapi/types/nodeoperatingsysteminfo.go old mode 100755 new mode 100644 index 681e8fa5ea..8580592f84 --- a/typedapi/types/nodeoperatingsysteminfo.go +++ b/typedapi/types/nodeoperatingsysteminfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeOperatingSystemInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L364-L381 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L364-L381 type NodeOperatingSystemInfo struct { // AllocatedProcessors The number of processors actually used to calculate thread pool size. This // number can be set with the node.processors setting of a node and defaults to @@ -44,6 +54,101 @@ type NodeOperatingSystemInfo struct { Version string `json:"version"` } +func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocated_processors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AllocatedProcessors = &value + case float64: + f := int(v) + s.AllocatedProcessors = &f + } + + case "arch": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Arch = o + + case "available_processors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AvailableProcessors = value + case float64: + f := int(v) + s.AvailableProcessors = f + } + + case "cpu": + if err := dec.Decode(&s.Cpu); err != nil { + return err + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "pretty_name": + if err := dec.Decode(&s.PrettyName); err != nil { + return err + } + + case "refresh_interval_in_millis": + if err := dec.Decode(&s.RefreshIntervalInMillis); err != nil { + return err + } + + case "swap": + if err := dec.Decode(&s.Swap); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNodeOperatingSystemInfo returns a NodeOperatingSystemInfo. func NewNodeOperatingSystemInfo() *NodeOperatingSystemInfo { r := &NodeOperatingSystemInfo{} diff --git a/typedapi/types/nodepackagingtype.go b/typedapi/types/nodepackagingtype.go old mode 100755 new mode 100644 index 54ed7cb98b..68b361397e --- a/typedapi/types/nodepackagingtype.go +++ b/typedapi/types/nodepackagingtype.go @@ -16,19 +16,81 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodePackagingType type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L283-L287 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L283-L287 type NodePackagingType struct { Count int `json:"count"` Flavor string `json:"flavor"` Type string `json:"type"` } +func (s *NodePackagingType) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "flavor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flavor = o + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewNodePackagingType returns a NodePackagingType. func NewNodePackagingType() *NodePackagingType { r := &NodePackagingType{} diff --git a/typedapi/types/nodeprocessinfo.go b/typedapi/types/nodeprocessinfo.go old mode 100755 new mode 100644 index ae41340854..3e6f034d14 --- a/typedapi/types/nodeprocessinfo.go +++ b/typedapi/types/nodeprocessinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeProcessInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L383-L390 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L383-L390 type NodeProcessInfo struct { // Id Process identifier (PID) Id int64 `json:"id"` @@ -32,6 +42,60 @@ type NodeProcessInfo struct { RefreshIntervalInMillis int64 `json:"refresh_interval_in_millis"` } +func (s *NodeProcessInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Id = value + case float64: + f := int64(v) + s.Id = f + } + + case "mlockall": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Mlockall = value + case bool: + s.Mlockall = v + } + + case "refresh_interval_in_millis": + if err := dec.Decode(&s.RefreshIntervalInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewNodeProcessInfo returns a NodeProcessInfo. func NewNodeProcessInfo() *NodeProcessInfo { r := &NodeProcessInfo{} diff --git a/typedapi/types/nodereloaderror.go b/typedapi/types/nodereloaderror.go old mode 100755 new mode 100644 index 2860ddbca4..27a006588b --- a/typedapi/types/nodereloaderror.go +++ b/typedapi/types/nodereloaderror.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeReloadError type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/NodeReloadResult.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/NodeReloadResult.ts#L24-L27 type NodeReloadError struct { Name string `json:"name"` ReloadException *ErrorCause `json:"reload_exception,omitempty"` } +func (s *NodeReloadError) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "reload_exception": + if err := dec.Decode(&s.ReloadException); err != nil { + return err + } + + } + } + return nil +} + // NewNodeReloadError returns a NodeReloadError. func NewNodeReloadError() *NodeReloadError { r := &NodeReloadError{} diff --git a/typedapi/types/nodereloadresult.go b/typedapi/types/nodereloadresult.go old mode 100755 new mode 100644 index 45047e585f..26b2f8df93 --- a/typedapi/types/nodereloadresult.go +++ b/typedapi/types/nodereloadresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // Stats // NodeReloadError // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/NodeReloadResult.ts#L29-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/NodeReloadResult.ts#L29-L30 type NodeReloadResult interface{} diff --git a/typedapi/types/noderoles.go b/typedapi/types/noderoles.go old mode 100755 new mode 100644 index ae708c9242..b0159612ab --- a/typedapi/types/noderoles.go +++ b/typedapi/types/noderoles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ import ( // NodeRoles type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Node.ts#L96-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Node.ts#L96-L99 type NodeRoles []noderole.NodeRole diff --git a/typedapi/types/nodescontext.go b/typedapi/types/nodescontext.go old mode 100755 new mode 100644 index 8d34d0f63d..98c37acaa4 --- a/typedapi/types/nodescontext.go +++ b/typedapi/types/nodescontext.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodesContext type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L397-L402 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L397-L402 type NodesContext struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` @@ -30,6 +40,79 @@ type NodesContext struct { Context *string `json:"context,omitempty"` } +func (s *NodesContext) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CacheEvictions = &value + case float64: + f := int64(v) + s.CacheEvictions = &f + } + + case "compilation_limit_triggered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CompilationLimitTriggered = &value + case float64: + f := int64(v) + s.CompilationLimitTriggered = &f + } + + case "compilations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Compilations = &value + case float64: + f := int64(v) + s.Compilations = &f + } + + case "context": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Context = &o + + } + } + return nil +} + // NewNodesContext returns a NodesContext. func NewNodesContext() *NodesContext { r := &NodesContext{} diff --git a/typedapi/types/nodescredentials.go b/typedapi/types/nodescredentials.go old mode 100755 new mode 100644 index 9ff9f63b20..ccedae851c --- a/typedapi/types/nodescredentials.go +++ b/typedapi/types/nodescredentials.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodesCredentials type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_service_credentials/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_service_credentials/types.ts#L23-L28 type NodesCredentials struct { // FileTokens File-backed tokens collected from all nodes FileTokens map[string]NodesCredentialsFileToken `json:"file_tokens"` diff --git a/typedapi/types/nodescredentialsfiletoken.go b/typedapi/types/nodescredentialsfiletoken.go old mode 100755 new mode 100644 index 296e2aac5f..d1fcfac08f --- a/typedapi/types/nodescredentialsfiletoken.go +++ b/typedapi/types/nodescredentialsfiletoken.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodesCredentialsFileToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_service_credentials/types.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_service_credentials/types.ts#L30-L32 type NodesCredentialsFileToken struct { Nodes []string `json:"nodes"` } diff --git a/typedapi/types/nodeshard.go b/typedapi/types/nodeshard.go old mode 100755 new mode 100644 index 99c02db6f7..5cb3548f16 --- a/typedapi/types/nodeshard.go +++ b/typedapi/types/nodeshard.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/shardroutingstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NodeShard type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Node.ts#L59-L70 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Node.ts#L59-L70 type NodeShard struct { AllocationId map[string]string `json:"allocation_id,omitempty"` Index string `json:"index"` @@ -40,6 +48,102 @@ type NodeShard struct { UnassignedInfo *UnassignedInformation `json:"unassigned_info,omitempty"` } +func (s *NodeShard) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocation_id": + if s.AllocationId == nil { + s.AllocationId = make(map[string]string, 0) + } + if err := dec.Decode(&s.AllocationId); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "primary": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Primary = value + case bool: + s.Primary = v + } + + case "recovery_source": + if s.RecoverySource == nil { + s.RecoverySource = make(map[string]string, 0) + } + if err := dec.Decode(&s.RecoverySource); err != nil { + return err + } + + case "relocating_node": + if err := dec.Decode(&s.RelocatingNode); err != nil { + return err + } + + case "relocation_failure_info": + if err := dec.Decode(&s.RelocationFailureInfo); err != nil { + return err + } + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "unassigned_info": + if err := dec.Decode(&s.UnassignedInfo); err != nil { + return err + } + + } + } + return nil +} + // 
NewNodeShard returns a NodeShard. func NewNodeShard() *NodeShard { r := &NodeShard{ diff --git a/typedapi/types/nodeshutdownstatus.go b/typedapi/types/nodeshutdownstatus.go old mode 100755 new mode 100644 index 67341f7d41..b3921a0b97 --- a/typedapi/types/nodeshutdownstatus.go +++ b/typedapi/types/nodeshutdownstatus.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/shutdownstatus" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/shutdowntype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // NodeShutdownStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L29-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L29-L38 type NodeShutdownStatus struct { NodeId string `json:"node_id"` PersistentTasks PersistentTaskStatus `json:"persistent_tasks"` @@ -39,6 +45,69 @@ type NodeShutdownStatus struct { Type shutdowntype.ShutdownType `json:"type"` } +func (s *NodeShutdownStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "persistent_tasks": + if err := dec.Decode(&s.PersistentTasks); err != nil { + return err + } + + case "plugins": + if err := dec.Decode(&s.Plugins); err != nil { + return err + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = o + + case "shard_migration": + if err := dec.Decode(&s.ShardMigration); err != nil { + return err + } + + case "shutdown_startedmillis": + if err := dec.Decode(&s.ShutdownStartedmillis); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewNodeShutdownStatus returns a NodeShutdownStatus. func NewNodeShutdownStatus() *NodeShutdownStatus { r := &NodeShutdownStatus{} diff --git a/typedapi/types/nodesindexingpressure.go b/typedapi/types/nodesindexingpressure.go old mode 100755 new mode 100644 index 742966b1ec..48f68bfd83 --- a/typedapi/types/nodesindexingpressure.go +++ b/typedapi/types/nodesindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodesIndexingPressure type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L55-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L55-L57 type NodesIndexingPressure struct { Memory *NodesIndexingPressureMemory `json:"memory,omitempty"` } diff --git a/typedapi/types/nodesindexingpressurememory.go b/typedapi/types/nodesindexingpressurememory.go old mode 100755 new mode 100644 index 090a3e7dfa..3359d7b250 --- a/typedapi/types/nodesindexingpressurememory.go +++ b/typedapi/types/nodesindexingpressurememory.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodesIndexingPressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L59-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L59-L64 type NodesIndexingPressureMemory struct { Current *PressureMemory `json:"current,omitempty"` Limit ByteSize `json:"limit,omitempty"` @@ -30,6 +40,56 @@ type NodesIndexingPressureMemory struct { Total *PressureMemory `json:"total,omitempty"` } +func (s *NodesIndexingPressureMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current": + if err := dec.Decode(&s.Current); err != nil { + return err + } + + case "limit": + if err := dec.Decode(&s.Limit); err != nil { + return err + } + + case "limit_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LimitInBytes = &value + case float64: + f := int64(v) + s.LimitInBytes = &f + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + } + } + return nil +} + // NewNodesIndexingPressureMemory returns a NodesIndexingPressureMemory. func NewNodesIndexingPressureMemory() *NodesIndexingPressureMemory { r := &NodesIndexingPressureMemory{} diff --git a/typedapi/types/nodesingest.go b/typedapi/types/nodesingest.go old mode 100755 new mode 100644 index 8190eaa476..a5183894d0 --- a/typedapi/types/nodesingest.go +++ b/typedapi/types/nodesingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // NodesIngest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L144-L147 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L144-L147 type NodesIngest struct { Pipelines map[string]IngestTotal `json:"pipelines,omitempty"` Total *IngestTotal `json:"total,omitempty"` diff --git a/typedapi/types/nodesrecord.go b/typedapi/types/nodesrecord.go old mode 100755 new mode 100644 index bb48f38bd3..be52253480 --- a/typedapi/types/nodesrecord.go +++ b/typedapi/types/nodesrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/nodes/types.ts#L23-L541 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/nodes/types.ts#L23-L541 type NodesRecord struct { // Build es build hash Build *string `json:"build,omitempty"` @@ -219,6 +227,764 @@ type NodesRecord struct { Version *string `json:"version,omitempty"` } +func (s *NodesRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "build", "b": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Build = &o + + case "bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkAvgSizeInBytes = &o + + case "bulk.avg_time", "bati", "bulkAvgTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkAvgTime = &o + + case "bulk.total_operations", "bto", "bulkTotalOperations": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalOperations = &o + + case "bulk.total_size_in_bytes", "btsi", "bulkTotalSizeInBytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalSizeInBytes = &o + + case "bulk.total_time", "btti", "bulkTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalTime = &o + + case "completion.size", "cs", "completionSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CompletionSize = &o + + case "cpu": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Cpu = &o + + case "disk.avail", "d", "da", "disk", "diskAvail": + if err := dec.Decode(&s.DiskAvail); err != nil { + return err + } + + case "disk.total", "dt", "diskTotal": + if err := dec.Decode(&s.DiskTotal); err != nil { + return err + } + + case "disk.used", "du", "diskUsed": + if err := dec.Decode(&s.DiskUsed); err != nil { + return err + } + + case 
"disk.used_percent", "dup", "diskUsedPercent": + if err := dec.Decode(&s.DiskUsedPercent); err != nil { + return err + } + + case "fielddata.evictions", "fe", "fielddataEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FielddataEvictions = &o + + case "fielddata.memory_size", "fm", "fielddataMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FielddataMemorySize = &o + + case "file_desc.current", "fdc", "fileDescriptorCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FileDescCurrent = &o + + case "file_desc.max", "fdm", "fileDescriptorMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FileDescMax = &o + + case "file_desc.percent", "fdp", "fileDescriptorPercent": + if err := dec.Decode(&s.FileDescPercent); err != nil { + return err + } + + case "flavor", "f": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flavor = &o + + case "flush.total", "ft", "flushTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FlushTotal = &o + + case "flush.total_time", "ftt", "flushTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FlushTotalTime = &o + + case "get.current", "gc", "getCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetCurrent = &o + + case "get.exists_time", "geti", "getExistsTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetExistsTime = &o + + case "get.exists_total", "geto", "getExistsTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetExistsTotal = &o + + case "get.missing_time", "gmti", "getMissingTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetMissingTime = &o + + case "get.missing_total", "gmto", "getMissingTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetMissingTotal = &o + + case "get.time", "gti", "getTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetTime = &o + + case "get.total", "gto", "getTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetTotal = &o + + case "heap.current", "hc", "heapCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HeapCurrent = &o + + case "heap.max", "hm", "heapMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HeapMax = &o + + case "heap.percent", "hp", "heapPercent": + if err := dec.Decode(&s.HeapPercent); err != nil { + return err + } + + case "http_address", "http": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HttpAddress = &o + + case "id", "nodeId": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "indexing.delete_current", "idc", "indexingDeleteCurrent": + var tmp json.RawMessage + if err := 
dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteCurrent = &o + + case "indexing.delete_time", "idti", "indexingDeleteTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteTime = &o + + case "indexing.delete_total", "idto", "indexingDeleteTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteTotal = &o + + case "indexing.index_current", "iic", "indexingIndexCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexCurrent = &o + + case "indexing.index_failed", "iif", "indexingIndexFailed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexFailed = &o + + case "indexing.index_time", "iiti", "indexingIndexTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexTime = &o + + case "indexing.index_total", "iito", "indexingIndexTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexTotal = &o + + case "ip", "i": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Ip = &o + + case "jdk", "j": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Jdk = &o + + case "load_15m", "l": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Load15M = &o + + case "load_1m": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Load1M = &o + + case "load_5m": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Load5M = &o + + case "master", "m": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Master = &o + + case "merges.current", "mc", "mergesCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrent = &o + + case "merges.current_docs", "mcd", "mergesCurrentDocs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrentDocs = &o + + case "merges.current_size", "mcs", "mergesCurrentSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrentSize = &o + + case "merges.total", "mt", "mergesTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotal = &o + + case "merges.total_docs", "mtd", "mergesTotalDocs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalDocs = &o + + case "merges.total_size", "mts", "mergesTotalSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalSize = &o + + case "merges.total_time", "mtt", "mergesTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalTime = &o + + case "name", "n": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "node.role", "r", "role", 
"nodeRole": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeRole = &o + + case "pid", "p": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pid = &o + + case "port", "po": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Port = &o + + case "query_cache.evictions", "qce", "queryCacheEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheEvictions = &o + + case "query_cache.hit_count", "qchc", "queryCacheHitCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheHitCount = &o + + case "query_cache.memory_size", "qcm", "queryCacheMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheMemorySize = &o + + case "query_cache.miss_count", "qcmc", "queryCacheMissCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheMissCount = &o + + case "ram.current", "rc", "ramCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RamCurrent = &o + + case "ram.max", "rn", "ramMax": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RamMax = &o + + case "ram.percent", "rp", "ramPercent": + if err := dec.Decode(&s.RamPercent); err != nil { + return err + } + + case "refresh.external_time", "rti", "refreshTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshExternalTime = &o + + case "refresh.external_total", "rto", "refreshTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshExternalTotal = &o + + case "refresh.listeners", "rli", "refreshListeners": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshListeners = &o + + case "refresh.time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshTime = &o + + case "refresh.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshTotal = &o + + case "request_cache.evictions", "rce", "requestCacheEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheEvictions = &o + + case "request_cache.hit_count", "rchc", "requestCacheHitCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheHitCount = &o + + case "request_cache.memory_size", "rcm", "requestCacheMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheMemorySize = &o + + case "request_cache.miss_count", "rcmc", "requestCacheMissCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RequestCacheMissCount = &o + + case "script.cache_evictions", "scrce", "scriptCacheEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ScriptCacheEvictions = &o + + case 
"script.compilation_limit_triggered", "scrclt", "scriptCacheCompilationLimitTriggered": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ScriptCompilationLimitTriggered = &o + + case "script.compilations", "scrcc", "scriptCompilations": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ScriptCompilations = &o + + case "search.fetch_current", "sfc", "searchFetchCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchCurrent = &o + + case "search.fetch_time", "sfti", "searchFetchTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchTime = &o + + case "search.fetch_total", "sfto", "searchFetchTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchTotal = &o + + case "search.open_contexts", "so", "searchOpenContexts": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchOpenContexts = &o + + case "search.query_current", "sqc", "searchQueryCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryCurrent = &o + + case "search.query_time", "sqti", "searchQueryTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryTime = &o + + case "search.query_total", "sqto", "searchQueryTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryTotal = &o + + case "search.scroll_current", "scc", "searchScrollCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollCurrent = &o + + case "search.scroll_time", "scti", "searchScrollTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollTime = &o + + case "search.scroll_total", "scto", "searchScrollTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollTotal = &o + + case "segments.count", "sc", "segmentsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsCount = &o + + case "segments.fixed_bitset_memory", "sfbm", "fixedBitsetMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsFixedBitsetMemory = &o + + case "segments.index_writer_memory", "siwm", "segmentsIndexWriterMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsIndexWriterMemory = &o + + case "segments.memory", "sm", "segmentsMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsMemory = &o + + case "segments.version_map_memory", "svmm", "segmentsVersionMapMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsVersionMapMemory = &o + + case "suggest.current", "suc", "suggestCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SuggestCurrent = &o + + case 
"suggest.time", "suti", "suggestTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SuggestTime = &o + + case "suggest.total", "suto", "suggestTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SuggestTotal = &o + + case "type", "t": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + case "uptime", "u": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Uptime = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNodesRecord returns a NodesRecord. func NewNodesRecord() *NodesRecord { r := &NodesRecord{} diff --git a/typedapi/types/nodestatistics.go b/typedapi/types/nodestatistics.go old mode 100755 new mode 100644 index 648d8d9e2d..ba4b502171 --- a/typedapi/types/nodestatistics.go +++ b/typedapi/types/nodestatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Node.ts#L28-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Node.ts#L28-L39 type NodeStatistics struct { // Failed Number of nodes that rejected the request or failed to respond. If this value // is not 0, a reason for the rejection or failure is included in the response. @@ -34,6 +44,79 @@ type NodeStatistics struct { Total int `json:"total"` } +func (s *NodeStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "failed": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Failed = value + case float64: + f := int(v) + s.Failed = f + } + + case "failures": + if err := dec.Decode(&s.Failures); err != nil { + return err + } + + case "successful": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Successful = value + case float64: + f := int(v) + s.Successful = f + } + + case "total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Total = value + case float64: + f := int(v) + s.Total = f + } + + } + } + return nil +} + // NewNodeStatistics returns a NodeStatistics. func NewNodeStatistics() *NodeStatistics { r := &NodeStatistics{} diff --git a/typedapi/types/nodetasks.go b/typedapi/types/nodetasks.go old mode 100755 new mode 100644 index 9fe56ea036..9440ef3c0e --- a/typedapi/types/nodetasks.go +++ b/typedapi/types/nodetasks.go @@ -16,28 +16,97 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NodeTasks type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/_types/TaskListResponseBase.ts#L49-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/_types/TaskListResponseBase.ts#L49-L57 type NodeTasks struct { Attributes map[string]string `json:"attributes,omitempty"` Host *string `json:"host,omitempty"` Ip *string `json:"ip,omitempty"` Name *string `json:"name,omitempty"` Roles []string `json:"roles,omitempty"` - Tasks map[TaskId]TaskInfo `json:"tasks"` + Tasks map[string]TaskInfo `json:"tasks"` TransportAddress *string `json:"transport_address,omitempty"` } +func (s *NodeTasks) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "tasks": + if s.Tasks == nil { + s.Tasks = make(map[string]TaskInfo, 0) + } + if err := dec.Decode(&s.Tasks); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewNodeTasks returns a NodeTasks. func NewNodeTasks() *NodeTasks { r := &NodeTasks{ Attributes: make(map[string]string, 0), - Tasks: make(map[TaskId]TaskInfo, 0), + Tasks: make(map[string]TaskInfo, 0), } return r diff --git a/typedapi/types/nodethreadpoolinfo.go b/typedapi/types/nodethreadpoolinfo.go old mode 100755 new mode 100644 index 7d0910225d..b79995c1e8 --- a/typedapi/types/nodethreadpoolinfo.go +++ b/typedapi/types/nodethreadpoolinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // NodeThreadPoolInfo type. 
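// Illustrative sketch, not part of the generated diff: with NodeTasks.Tasks
// now typed as map[string]TaskInfo (previously map[TaskId]TaskInfo), task
// entries are keyed by their plain string task ID after unmarshalling. The
// node name and the empty task body below are made-up example values.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"name":"instance-0000000001","tasks":{"oTUltX4IQMOUUVeiohTt8A:124":{}}}`)

	var nt types.NodeTasks
	if err := json.Unmarshal(payload, &nt); err != nil {
		panic(err)
	}

	// The task map is keyed by the string task ID.
	_, ok := nt.Tasks["oTUltX4IQMOUUVeiohTt8A:124"]
	fmt.Println(len(nt.Tasks), ok) // 1 true
}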
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/info/types.ts#L286-L293 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/info/types.ts#L286-L293 type NodeThreadPoolInfo struct { Core *int `json:"core,omitempty"` KeepAlive Duration `json:"keep_alive,omitempty"` @@ -32,6 +42,103 @@ type NodeThreadPoolInfo struct { Type string `json:"type"` } +func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "core": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Core = &value + case float64: + f := int(v) + s.Core = &f + } + + case "keep_alive": + if err := dec.Decode(&s.KeepAlive); err != nil { + return err + } + + case "max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Max = &value + case float64: + f := int(v) + s.Max = &f + } + + case "queue_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.QueueSize = value + case float64: + f := int(v) + s.QueueSize = f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewNodeThreadPoolInfo returns a NodeThreadPoolInfo. func NewNodeThreadPoolInfo() *NodeThreadPoolInfo { r := &NodeThreadPoolInfo{} diff --git a/typedapi/types/nodeusage.go b/typedapi/types/nodeusage.go old mode 100755 new mode 100644 index 542553d604..516a3beb93 --- a/typedapi/types/nodeusage.go +++ b/typedapi/types/nodeusage.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // NodeUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/usage/types.ts#L25-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/usage/types.ts#L25-L30 type NodeUsage struct { Aggregations map[string]json.RawMessage `json:"aggregations"` RestActions map[string]int `json:"rest_actions"` @@ -34,6 +38,52 @@ type NodeUsage struct { Timestamp int64 `json:"timestamp"` } +func (s *NodeUsage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Aggregations); err != nil { + return err + } + + case "rest_actions": + if s.RestActions == nil { + s.RestActions = make(map[string]int, 0) + } + if err := dec.Decode(&s.RestActions); err != nil { + return err + } + + case "since": + if err := dec.Decode(&s.Since); err != nil { + return err + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewNodeUsage returns a NodeUsage. func NewNodeUsage() *NodeUsage { r := &NodeUsage{ diff --git a/typedapi/types/norianalyzer.go b/typedapi/types/norianalyzer.go old mode 100755 new mode 100644 index f11192712f..309802c3b9 --- a/typedapi/types/norianalyzer.go +++ b/typedapi/types/norianalyzer.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noridecompoundmode" + + "bytes" + "errors" + "io" + + "encoding/json" ) // NoriAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L66-L72 type NoriAnalyzer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` Stoptags []string `json:"stoptags,omitempty"` @@ -35,6 +41,54 @@ type NoriAnalyzer struct { Version *string `json:"version,omitempty"` } +func (s *NoriAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decompound_mode": + if err := dec.Decode(&s.DecompoundMode); err != nil { + return err + } + + case "stoptags": + if err := dec.Decode(&s.Stoptags); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "user_dictionary": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UserDictionary = &o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNoriAnalyzer returns a NoriAnalyzer. 
func NewNoriAnalyzer() *NoriAnalyzer { r := &NoriAnalyzer{} diff --git a/typedapi/types/noripartofspeechtokenfilter.go b/typedapi/types/noripartofspeechtokenfilter.go old mode 100755 new mode 100644 index 2417e8fc6f..9e7888b163 --- a/typedapi/types/noripartofspeechtokenfilter.go +++ b/typedapi/types/noripartofspeechtokenfilter.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // NoriPartOfSpeechTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L272-L275 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L273-L276 type NoriPartOfSpeechTokenFilter struct { Stoptags []string `json:"stoptags,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *NoriPartOfSpeechTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "stoptags": + if err := dec.Decode(&s.Stoptags); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNoriPartOfSpeechTokenFilter returns a NoriPartOfSpeechTokenFilter. func NewNoriPartOfSpeechTokenFilter() *NoriPartOfSpeechTokenFilter { r := &NoriPartOfSpeechTokenFilter{} diff --git a/typedapi/types/noritokenizer.go b/typedapi/types/noritokenizer.go old mode 100755 new mode 100644 index 53236d6b01..078f88cce1 --- a/typedapi/types/noritokenizer.go +++ b/typedapi/types/noritokenizer.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noridecompoundmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NoriTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L80-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L80-L86 type NoriTokenizer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` @@ -36,6 +44,68 @@ type NoriTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *NoriTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decompound_mode": + if err := dec.Decode(&s.DecompoundMode); err != nil { + return err + } + + case "discard_punctuation": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DiscardPunctuation = &value + case bool: + s.DiscardPunctuation = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "user_dictionary": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UserDictionary = &o + + case "user_dictionary_rules": + if err := dec.Decode(&s.UserDictionaryRules); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewNoriTokenizer returns a NoriTokenizer. func NewNoriTokenizer() *NoriTokenizer { r := &NoriTokenizer{} diff --git a/typedapi/types/normalizeaggregation.go b/typedapi/types/normalizeaggregation.go old mode 100755 new mode 100644 index 636109e5b2..0813a4eb52 --- a/typedapi/types/normalizeaggregation.go +++ b/typedapi/types/normalizeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -33,18 +33,19 @@ import ( // NormalizeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L262-L264 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L262-L264 type NormalizeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` Method *normalizemethod.NormalizeMethod `json:"method,omitempty"` Name *string `json:"name,omitempty"` } func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,9 +65,12 @@ func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -84,9 +88,12 @@ func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/normalizer.go b/typedapi/types/normalizer.go old mode 100755 new mode 100644 index 4b8cca88c4..fa62904b1a --- a/typedapi/types/normalizer.go +++ b/typedapi/types/normalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // LowercaseNormalizer // CustomNormalizer // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/normalizers.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/normalizers.ts#L20-L24 type Normalizer interface{} diff --git a/typedapi/types/numberrangequery.go b/typedapi/types/numberrangequery.go old mode 100755 new mode 100644 index af667576b7..26af0a0ea3 --- a/typedapi/types/numberrangequery.go +++ b/typedapi/types/numberrangequery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // NumberRangeQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L83-L90 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L83-L90 type NumberRangeQuery struct { Boost *float32 `json:"boost,omitempty"` From Float64 `json:"from,omitempty"` @@ -39,6 +47,129 @@ type NumberRangeQuery struct { To Float64 `json:"to,omitempty"` } +func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "from": + if err := dec.Decode(&s.From); err != nil { + return err + } + + case "gt": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Gt = &f + case float64: + f := Float64(v) + s.Gt = &f + } + + case "gte": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Gte = &f + case float64: + f := Float64(v) + s.Gte = &f + } + + case "lt": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lt = &f + case float64: + f := Float64(v) + s.Lt = &f + } + + case "lte": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lte = &f + case float64: + f := Float64(v) + s.Lte = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return err + } + + case "to": + if err := dec.Decode(&s.To); err != nil { + return err + } + + } + } + return nil +} + // NewNumberRangeQuery returns a NumberRangeQuery. func NewNumberRangeQuery() *NumberRangeQuery { r := &NumberRangeQuery{} diff --git a/typedapi/types/numericdecayfunction.go b/typedapi/types/numericdecayfunction.go old mode 100755 new mode 100644 index 7513bb363b..9a7ce646c7 --- a/typedapi/types/numericdecayfunction.go +++ b/typedapi/types/numericdecayfunction.go @@ -16,23 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // NumericDecayFunction type. 
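// Illustrative sketch, not part of the generated diff: the NumberRangeQuery
// decoder above parses its bounds (gt, gte, lt, lte) and boost from either
// JSON numbers or numeric strings. The query body is a made-up example.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var q types.NumberRangeQuery
	// "gte" arrives as a string, "lt" as a number; both end up as Float64 values.
	if err := json.Unmarshal([]byte(`{"gte":"10","lt":20,"boost":1.2}`), &q); err != nil {
		panic(err)
	}

	fmt.Println(*q.Gte, *q.Lt, *q.Boost) // 10 20 1.2
}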
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L88-L90 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L88-L90 type NumericDecayFunction struct { MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` - NumericDecayFunction map[string]DecayPlacementdoubledouble `json:"-"` + NumericDecayFunction map[string]DecayPlacementdoubledouble `json:"NumericDecayFunction,omitempty"` +} + +func (s *NumericDecayFunction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "multi_value_mode": + if err := dec.Decode(&s.MultiValueMode); err != nil { + return err + } + + case "NumericDecayFunction": + if s.NumericDecayFunction == nil { + s.NumericDecayFunction = make(map[string]DecayPlacementdoubledouble, 0) + } + if err := dec.Decode(&s.NumericDecayFunction); err != nil { + return err + } + + default: + + } + } + return nil } // MarhsalJSON overrides marshalling for types with additional properties @@ -54,6 +94,7 @@ func (s NumericDecayFunction) MarshalJSON() ([]byte, error) { for key, value := range s.NumericDecayFunction { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "NumericDecayFunction") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/numericfielddata.go b/typedapi/types/numericfielddata.go old mode 100755 new mode 100644 index 98adf7ff01..d40e8b1bd5 --- a/typedapi/types/numericfielddata.go +++ b/typedapi/types/numericfielddata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // NumericFielddata type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/NumericFielddata.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/NumericFielddata.ts#L22-L24 type NumericFielddata struct { Format numericfielddataformat.NumericFielddataFormat `json:"format"` } diff --git a/typedapi/types/objectproperty.go b/typedapi/types/objectproperty.go old mode 100755 new mode 100644 index e791ffae55..250a393c4f --- a/typedapi/types/objectproperty.go +++ b/typedapi/types/objectproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ObjectProperty type. 
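// Illustrative sketch, not part of the generated diff: the ObjectProperty
// decoder that follows normalizes copy_to, which a mapping may carry either
// as a single string or as an array of strings, into a []string. The field
// names below are made up for the example.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var single, multi types.ObjectProperty

	// A bare string becomes a one-element slice.
	if err := json.Unmarshal([]byte(`{"copy_to":"target"}`), &single); err != nil {
		panic(err)
	}
	// An array is decoded as-is.
	if err := json.Unmarshal([]byte(`{"copy_to":["target_a","target_b"]}`), &multi); err != nil {
		panic(err)
	}

	fmt.Println(single.CopyTo, multi.CopyTo) // [target] [target_a target_b]
}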
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/complex.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/complex.ts#L46-L49 type ObjectProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -48,6 +50,7 @@ type ObjectProperty struct { } func (s *ObjectProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,8 +65,19 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "dynamic": @@ -72,11 +86,23 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { } case "enabled": - if err := dec.Decode(&s.Enabled); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -364,23 +390,40 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -668,20 +711,32 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/onehotencodingpreprocessor.go b/typedapi/types/onehotencodingpreprocessor.go old mode 100755 new mode 100644 index 04b4e6841f..b70dab6cce --- a/typedapi/types/onehotencodingpreprocessor.go +++ 
b/typedapi/types/onehotencodingpreprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // OneHotEncodingPreprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L44-L47 type OneHotEncodingPreprocessor struct { Field string `json:"field"` HotMap map[string]string `json:"hot_map"` diff --git a/typedapi/types/operatingsystem.go b/typedapi/types/operatingsystem.go old mode 100755 new mode 100644 index 30acbb3413..d1ecc7f385 --- a/typedapi/types/operatingsystem.go +++ b/typedapi/types/operatingsystem.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // OperatingSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L373-L379 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L373-L379 type OperatingSystem struct { Cgroup *Cgroup `json:"cgroup,omitempty"` Cpu *Cpu `json:"cpu,omitempty"` @@ -31,6 +41,61 @@ type OperatingSystem struct { Timestamp *int64 `json:"timestamp,omitempty"` } +func (s *OperatingSystem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cgroup": + if err := dec.Decode(&s.Cgroup); err != nil { + return err + } + + case "cpu": + if err := dec.Decode(&s.Cpu); err != nil { + return err + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "swap": + if err := dec.Decode(&s.Swap); err != nil { + return err + } + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = &value + case float64: + f := int64(v) + s.Timestamp = &f + } + + } + } + return nil +} + // NewOperatingSystem returns a OperatingSystem. func NewOperatingSystem() *OperatingSystem { r := &OperatingSystem{} diff --git a/typedapi/types/operatingsystemmemoryinfo.go b/typedapi/types/operatingsystemmemoryinfo.go old mode 100755 new mode 100644 index 29c1237797..66bb8e3f6b --- a/typedapi/types/operatingsystemmemoryinfo.go +++ b/typedapi/types/operatingsystemmemoryinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // OperatingSystemMemoryInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/stats/types.ts#L289-L297 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/stats/types.ts#L289-L297 type OperatingSystemMemoryInfo struct { AdjustedTotalInBytes *int64 `json:"adjusted_total_in_bytes,omitempty"` FreeInBytes int64 `json:"free_in_bytes"` @@ -32,6 +42,118 @@ type OperatingSystemMemoryInfo struct { UsedPercent int `json:"used_percent"` } +func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "adjusted_total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AdjustedTotalInBytes = &value + case float64: + f := int64(v) + s.AdjustedTotalInBytes = &f + } + + case "free_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FreeInBytes = value + case float64: + f := int64(v) + s.FreeInBytes = f + } + + case "free_percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FreePercent = value + case float64: + f := int(v) + s.FreePercent = f + } + + case "total_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalInBytes = value + case float64: + f := int64(v) + s.TotalInBytes = f + } + + case "used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UsedInBytes = value + case float64: + f := int64(v) + s.UsedInBytes = f + } + + case "used_percent": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.UsedPercent = value + case float64: + f := int(v) + s.UsedPercent = f + } + + } + } + return nil +} + // NewOperatingSystemMemoryInfo returns a OperatingSystemMemoryInfo. func NewOperatingSystemMemoryInfo() *OperatingSystemMemoryInfo { r := &OperatingSystemMemoryInfo{} diff --git a/typedapi/types/outlierdetectionparameters.go b/typedapi/types/outlierdetectionparameters.go old mode 100755 new mode 100644 index 68699a491f..475b255940 --- a/typedapi/types/outlierdetectionparameters.go +++ b/typedapi/types/outlierdetectionparameters.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // OutlierDetectionParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L412-L419 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L412-L419 type OutlierDetectionParameters struct { ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` FeatureInfluenceThreshold *Float64 `json:"feature_influence_threshold,omitempty"` @@ -32,6 +42,110 @@ type OutlierDetectionParameters struct { StandardizationEnabled *bool `json:"standardization_enabled,omitempty"` } +func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compute_feature_influence": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ComputeFeatureInfluence = &value + case bool: + s.ComputeFeatureInfluence = &v + } + + case "feature_influence_threshold": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.FeatureInfluenceThreshold = &f + case float64: + f := Float64(v) + s.FeatureInfluenceThreshold = &f + } + + case "method": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Method = &o + + case "n_neighbors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NNeighbors = &value + case float64: + f := int(v) + s.NNeighbors = &f + } + + case "outlier_fraction": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.OutlierFraction = &f + case float64: + f := Float64(v) + s.OutlierFraction = &f + } + + case "standardization_enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StandardizationEnabled = &value + case bool: + s.StandardizationEnabled = &v + } + + } + } + return nil +} + // NewOutlierDetectionParameters returns a OutlierDetectionParameters. func NewOutlierDetectionParameters() *OutlierDetectionParameters { r := &OutlierDetectionParameters{} diff --git a/typedapi/types/overallbucket.go b/typedapi/types/overallbucket.go old mode 100755 new mode 100644 index acd794164a..8c8aa1a1e4 --- a/typedapi/types/overallbucket.go +++ b/typedapi/types/overallbucket.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
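// Illustrative sketch, not part of the generated diff: the
// OutlierDetectionParameters decoder above accepts its boolean and numeric
// settings as native JSON values or as quoted strings. The parameter values
// below are made up for the example.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"compute_feature_influence":"true","n_neighbors":"20","outlier_fraction":0.05}`)

	var p types.OutlierDetectionParameters
	if err := json.Unmarshal(payload, &p); err != nil {
		panic(err)
	}

	fmt.Println(*p.ComputeFeatureInfluence, *p.NNeighbors, *p.OutlierFraction) // true 20 0.05
}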
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // OverallBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Bucket.ts#L130-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Bucket.ts#L130-L145 type OverallBucket struct { // BucketSpan The length of the bucket in seconds. Matches the job with the longest // bucket_span value. @@ -42,6 +52,84 @@ type OverallBucket struct { TimestampString DateTime `json:"timestamp_string"` } +func (s *OverallBucket) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bucket_span": + if err := dec.Decode(&s.BucketSpan); err != nil { + return err + } + + case "is_interim": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsInterim = value + case bool: + s.IsInterim = v + } + + case "jobs": + if err := dec.Decode(&s.Jobs); err != nil { + return err + } + + case "overall_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.OverallScore = f + case float64: + f := Float64(v) + s.OverallScore = f + } + + case "result_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultType = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + case "timestamp_string": + if err := dec.Decode(&s.TimestampString); err != nil { + return err + } + + } + } + return nil +} + // NewOverallBucket returns a OverallBucket. func NewOverallBucket() *OverallBucket { r := &OverallBucket{} diff --git a/typedapi/types/overallbucketjob.go b/typedapi/types/overallbucketjob.go old mode 100755 new mode 100644 index 38ca9bc2e8..90521eb812 --- a/typedapi/types/overallbucketjob.go +++ b/typedapi/types/overallbucketjob.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // OverallBucketJob type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Bucket.ts#L146-L149 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Bucket.ts#L146-L149 type OverallBucketJob struct { JobId string `json:"job_id"` MaxAnomalyScore Float64 `json:"max_anomaly_score"` } +func (s *OverallBucketJob) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "max_anomaly_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MaxAnomalyScore = f + case float64: + f := Float64(v) + s.MaxAnomalyScore = f + } + + } + } + return nil +} + // NewOverallBucketJob returns a OverallBucketJob. func NewOverallBucketJob() *OverallBucketJob { r := &OverallBucketJob{} diff --git a/typedapi/types/overlapping.go b/typedapi/types/overlapping.go old mode 100755 new mode 100644 index 9613c3c68a..e7ae8d986f --- a/typedapi/types/overlapping.go +++ b/typedapi/types/overlapping.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Overlapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L39-L42 type Overlapping struct { IndexPatterns []string `json:"index_patterns"` Name string `json:"name"` } +func (s *Overlapping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index_patterns": + if err := dec.Decode(&s.IndexPatterns); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewOverlapping returns a Overlapping. func NewOverlapping() *Overlapping { r := &Overlapping{} diff --git a/typedapi/types/page.go b/typedapi/types/page.go old mode 100755 new mode 100644 index ea65e3a4fc..dad8bd65a9 --- a/typedapi/types/page.go +++ b/typedapi/types/page.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Page type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Page.ts#L22-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Page.ts#L22-L33 type Page struct { // From Skips the specified number of items. From *int `json:"from,omitempty"` @@ -30,6 +40,58 @@ type Page struct { Size *int `json:"size,omitempty"` } +func (s *Page) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "from": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.From = &value + case float64: + f := int(v) + s.From = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil +} + // NewPage returns a Page. func NewPage() *Page { r := &Page{} diff --git a/typedapi/types/pagerdutyaction.go b/typedapi/types/pagerdutyaction.go old mode 100755 new mode 100644 index ae13f98d8e..aaae3f40c0 --- a/typedapi/types/pagerdutyaction.go +++ b/typedapi/types/pagerdutyaction.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/pagerdutyeventtype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // PagerDutyAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L54-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L54-L54 type PagerDutyAction struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` @@ -39,6 +47,95 @@ type PagerDutyAction struct { Proxy *PagerDutyEventProxy `json:"proxy,omitempty"` } +func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "account": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Account = &o + + case "attach_payload": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AttachPayload = value + case bool: + s.AttachPayload = v + } + + case "client": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Client = &o + + case "client_url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClientUrl = &o + + case "contexts", "context": + if err := dec.Decode(&s.Contexts); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "event_type": + if err := dec.Decode(&s.EventType); err != nil { + return err + } + + case "incident_key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IncidentKey = o + + case "proxy": + if err := dec.Decode(&s.Proxy); err != nil { + return err + } + + } + } + return nil +} + // NewPagerDutyAction returns a PagerDutyAction. func NewPagerDutyAction() *PagerDutyAction { r := &PagerDutyAction{} diff --git a/typedapi/types/pagerdutycontext.go b/typedapi/types/pagerdutycontext.go old mode 100755 new mode 100644 index 2d050be6ea..c704097f30 --- a/typedapi/types/pagerdutycontext.go +++ b/typedapi/types/pagerdutycontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // PagerDutyContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L61-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L61-L65 type PagerDutyContext struct { Href *string `json:"href,omitempty"` Src *string `json:"src,omitempty"` diff --git a/typedapi/types/pagerdutyevent.go b/typedapi/types/pagerdutyevent.go old mode 100755 new mode 100644 index 69e3a7e440..bfba3f27ac --- a/typedapi/types/pagerdutyevent.go +++ b/typedapi/types/pagerdutyevent.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/pagerdutyeventtype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // PagerDutyEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L40-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L40-L52 type PagerDutyEvent struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` @@ -39,6 +47,95 @@ type PagerDutyEvent struct { Proxy *PagerDutyEventProxy `json:"proxy,omitempty"` } +func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "account": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Account = &o + + case "attach_payload": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AttachPayload = value + case bool: + s.AttachPayload = v + } + + case "client": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Client = &o + + case "client_url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClientUrl = &o + + case "contexts", "context": + if err := dec.Decode(&s.Contexts); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "event_type": + if err := dec.Decode(&s.EventType); err != nil { + return err + } + + case "incident_key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IncidentKey = o + + case "proxy": + if err := dec.Decode(&s.Proxy); err != nil { + return err + } + + } + } + return nil +} + // NewPagerDutyEvent returns a PagerDutyEvent. func NewPagerDutyEvent() *PagerDutyEvent { r := &PagerDutyEvent{} diff --git a/typedapi/types/pagerdutyeventproxy.go b/typedapi/types/pagerdutyeventproxy.go old mode 100755 new mode 100644 index 2a51e40496..b3f11946f8 --- a/typedapi/types/pagerdutyeventproxy.go +++ b/typedapi/types/pagerdutyeventproxy.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PagerDutyEventProxy type. 
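Both PagerDuty decoders above also accept two spellings of the same key (case "contexts", "context":), so documents written with the older singular name still populate Contexts. A small illustrative sketch of that alias handling, kept independent of the generated types (the []string element type is a simplification):

package main

import (
	"encoding/json"
	"fmt"
)

// event accepts either "contexts" or "context" as the key for the same slice,
// mirroring the alias handling in the generated PagerDuty decoders.
type event struct {
	Contexts []string
}

func (e *event) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	for _, key := range []string{"contexts", "context"} {
		if v, ok := raw[key]; ok {
			return json.Unmarshal(v, &e.Contexts)
		}
	}
	return nil
}

func main() {
	var a, b event
	_ = json.Unmarshal([]byte(`{"contexts": ["ctx"]}`), &a)
	_ = json.Unmarshal([]byte(`{"context": ["ctx"]}`), &b)
	fmt.Println(a.Contexts, b.Contexts) // [ctx] [ctx]
}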
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L56-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L56-L59 type PagerDutyEventProxy struct { Host *string `json:"host,omitempty"` Port *int `json:"port,omitempty"` } +func (s *PagerDutyEventProxy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "port": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Port = &value + case float64: + f := int(v) + s.Port = &f + } + + } + } + return nil +} + // NewPagerDutyEventProxy returns a PagerDutyEventProxy. func NewPagerDutyEventProxy() *PagerDutyEventProxy { r := &PagerDutyEventProxy{} diff --git a/typedapi/types/pagerdutyresult.go b/typedapi/types/pagerdutyresult.go old mode 100755 new mode 100644 index 1ff4bfb05b..2853bd26d4 --- a/typedapi/types/pagerdutyresult.go +++ b/typedapi/types/pagerdutyresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PagerDutyResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L78-L83 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L78-L83 type PagerDutyResult struct { Event PagerDutyEvent `json:"event"` Reason *string `json:"reason,omitempty"` diff --git a/typedapi/types/painlesscontextsetup.go b/typedapi/types/painlesscontextsetup.go old mode 100755 new mode 100644 index 1c4357c2c5..3f423d15c1 --- a/typedapi/types/painlesscontextsetup.go +++ b/typedapi/types/painlesscontextsetup.go @@ -16,23 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // PainlessContextSetup type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/scripts_painless_execute/types.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/scripts_painless_execute/types.ts#L25-L29 type PainlessContextSetup struct { Document json.RawMessage `json:"document,omitempty"` Index string `json:"index"` Query Query `json:"query"` } +func (s *PainlessContextSetup) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "document": + if err := dec.Decode(&s.Document); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + } + } + return nil +} + // NewPainlessContextSetup returns a PainlessContextSetup. func NewPainlessContextSetup() *PainlessContextSetup { r := &PainlessContextSetup{} diff --git a/typedapi/types/parentaggregate.go b/typedapi/types/parentaggregate.go old mode 100755 new mode 100644 index 682eb2378f..2d54555730 --- a/typedapi/types/parentaggregate.go +++ b/typedapi/types/parentaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // ParentAggregate type. 
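PainlessContextSetup above is the context_setup helper for the scripts_painless_execute API: a sample document, the index whose mappings should apply, and a query. A hedged usage sketch follows; it assumes types.Query exposes a MatchAll field backed by types.MatchAllQuery, as elsewhere in this package, and the exact marshalled layout may differ:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	setup := types.PainlessContextSetup{
		Document: json.RawMessage(`{"rank": 4}`),
		Index:    "my-index-000001",
		// Assumption: the match_all variant of types.Query is built this way.
		Query: types.Query{MatchAll: &types.MatchAllQuery{}},
	}

	body, err := json.Marshal(setup)
	if err != nil {
		panic(err)
	}
	// Roughly: {"document":{"rank":4},"index":"my-index-000001","query":{"match_all":{}}}
	fmt.Println(string(body))
}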
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L770-L771 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L779-L780 type ParentAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *ParentAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err 
:= dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := 
NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s ParentAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/parentaggregation.go b/typedapi/types/parentaggregation.go old mode 100755 new mode 100644 index 6eec5f4112..bb86224825 --- a/typedapi/types/parentaggregation.go +++ b/typedapi/types/parentaggregation.go @@ -16,21 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ParentAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L285-L287 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L285-L287 type ParentAggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Type *string `json:"type,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` +} + +func (s *ParentAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil } // NewParentAggregation returns a ParentAggregation. diff --git a/typedapi/types/parentidquery.go b/typedapi/types/parentidquery.go old mode 100755 new mode 100644 index 9ac26cae17..75459b1904 --- a/typedapi/types/parentidquery.go +++ b/typedapi/types/parentidquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
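The rewritten ParentAggregate decoder above is the most substantial change in this batch. Instead of looking for a literal "aggregations" key, every key other than doc_count and meta now falls through to the default case, where typed_keys-style names of the form "type#name" (for example "sterms#my_tags") are split on "#" and routed to the matching concrete Aggregate, with the Aggregations map allocated lazily; doc_count gains the same string-or-number coercion, Meta becomes the shared Metadata type, and MarshalJSON drops the temporary "Aggregations" key after flattening the sub-aggregations back into the top-level object. A compact standalone sketch of the dispatch idea, using plain maps instead of the generated aggregate types:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// parent is a stand-in for ParentAggregate: a doc_count plus a lazily
// allocated map of sub-aggregations keyed by their bare name.
type parent struct {
	DocCount     int64
	Aggregations map[string]any
}

func (p *parent) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	for key, val := range raw {
		switch key {
		case "doc_count":
			if err := json.Unmarshal(val, &p.DocCount); err != nil {
				return err
			}
		default:
			// typed_keys responses name sub-aggregations as "type#name"; the
			// generated code switches on the type half to pick a concrete struct.
			name := key
			if _, rest, ok := strings.Cut(key, "#"); ok {
				name = rest
			}
			if p.Aggregations == nil {
				p.Aggregations = make(map[string]any)
			}
			var sub any
			if err := json.Unmarshal(val, &sub); err != nil {
				return err
			}
			p.Aggregations[name] = sub
		}
	}
	return nil
}

func main() {
	body := `{"doc_count": 42, "sterms#tags": {"buckets": []}}`
	var p parent
	if err := json.Unmarshal([]byte(body), &p); err != nil {
		panic(err)
	}
	fmt.Println(p.DocCount, p.Aggregations["tags"]) // 42 map[buckets:[]]
}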
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ParentIdQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/joining.ts#L73-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/joining.ts#L73-L78 type ParentIdQuery struct { Boost *float32 `json:"boost,omitempty"` Id *string `json:"id,omitempty"` @@ -31,6 +41,74 @@ type ParentIdQuery struct { Type *string `json:"type,omitempty"` } +func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewParentIdQuery returns a ParentIdQuery. func NewParentIdQuery() *ParentIdQuery { r := &ParentIdQuery{} diff --git a/typedapi/types/parenttaskinfo.go b/typedapi/types/parenttaskinfo.go old mode 100755 new mode 100644 index 125567e37b..dbb192b760 --- a/typedapi/types/parenttaskinfo.go +++ b/typedapi/types/parenttaskinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ParentTaskInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/_types/TaskListResponseBase.ts#L45-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/_types/TaskListResponseBase.ts#L45-L47 type ParentTaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -40,6 +50,136 @@ type ParentTaskInfo struct { Type string `json:"type"` } +func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Action = o + + case "cancellable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Cancellable = value + case bool: + s.Cancellable = v + } + + case "cancelled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Cancelled = &value + case bool: + s.Cancelled = &v + } + + case "children": + if err := dec.Decode(&s.Children); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "headers": + if s.Headers == nil { + s.Headers = make(map[string]string, 0) + } + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Id = value + case float64: + f := int64(v) + s.Id = f + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "parent_task_id": + if err := dec.Decode(&s.ParentTaskId); err != nil { + return err + } + + case "running_time": + if err := dec.Decode(&s.RunningTime); err != nil { + return err + } + + case "running_time_in_nanos": + if err := dec.Decode(&s.RunningTimeInNanos); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewParentTaskInfo returns a ParentTaskInfo. func NewParentTaskInfo() *ParentTaskInfo { r := &ParentTaskInfo{ diff --git a/typedapi/types/passthroughinferenceoptions.go b/typedapi/types/passthroughinferenceoptions.go old mode 100755 new mode 100644 index 0f8e943080..96a0cff200 --- a/typedapi/types/passthroughinferenceoptions.go +++ b/typedapi/types/passthroughinferenceoptions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PassThroughInferenceOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L209-L216 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L209-L216 type PassThroughInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/passthroughinferenceupdateoptions.go b/typedapi/types/passthroughinferenceupdateoptions.go old mode 100755 new mode 100644 index 975f54d4ec..6a8699bd97 --- a/typedapi/types/passthroughinferenceupdateoptions.go +++ b/typedapi/types/passthroughinferenceupdateoptions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PassThroughInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L350-L355 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L350-L355 type PassThroughInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/pathhierarchytokenizer.go b/typedapi/types/pathhierarchytokenizer.go old mode 100755 new mode 100644 index 39588cccec..09116e7fda --- a/typedapi/types/pathhierarchytokenizer.go +++ b/typedapi/types/pathhierarchytokenizer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PathHierarchyTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L88-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L88-L95 type PathHierarchyTokenizer struct { BufferSize int `json:"buffer_size"` Delimiter string `json:"delimiter"` @@ -33,6 +43,98 @@ type PathHierarchyTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *PathHierarchyTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buffer_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.BufferSize = value + case float64: + f := int(v) + s.BufferSize = f + } + + case "delimiter": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Delimiter = o + + case "replacement": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Replacement = o + + case "reverse": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Reverse = value + case bool: + s.Reverse = v + } + + case "skip": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Skip = value + case float64: + f := int(v) + s.Skip = f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPathHierarchyTokenizer returns a PathHierarchyTokenizer. func NewPathHierarchyTokenizer() *PathHierarchyTokenizer { r := &PathHierarchyTokenizer{} diff --git a/typedapi/types/patternanalyzer.go b/typedapi/types/patternanalyzer.go old mode 100755 new mode 100644 index 21dab97c9f..ed7710f928 --- a/typedapi/types/patternanalyzer.go +++ b/typedapi/types/patternanalyzer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PatternAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L74-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L74-L81 type PatternAnalyzer struct { Flags *string `json:"flags,omitempty"` Lowercase *bool `json:"lowercase,omitempty"` @@ -32,6 +42,82 @@ type PatternAnalyzer struct { Version *string `json:"version,omitempty"` } +func (s *PatternAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "flags": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flags = &o + + case "lowercase": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lowercase = &value + case bool: + s.Lowercase = &v + } + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPatternAnalyzer returns a PatternAnalyzer. func NewPatternAnalyzer() *PatternAnalyzer { r := &PatternAnalyzer{} diff --git a/typedapi/types/patterncapturetokenfilter.go b/typedapi/types/patterncapturetokenfilter.go old mode 100755 new mode 100644 index b80d57bc26..133c23a81d --- a/typedapi/types/patterncapturetokenfilter.go +++ b/typedapi/types/patterncapturetokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PatternCaptureTokenFilter type. 
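The stopwords case in PatternAnalyzer above handles a field Elasticsearch accepts either as a single string (such as "_english_") or as an array: the raw message is sniffed for a leading "[" and a lone string is appended as a one-element slice. A minimal sketch of that normalization on its own (the TrimSpace call is an added safeguard, not part of the generated code):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// stopwords normalizes a string-or-array JSON value into a []string, the same
// trick the generated analyzer decoders use for their stopwords fields.
type stopwords []string

func (s *stopwords) UnmarshalJSON(data []byte) error {
	data = bytes.TrimSpace(data)
	if !bytes.HasPrefix(data, []byte("[")) {
		var one string
		if err := json.Unmarshal(data, &one); err != nil {
			return err
		}
		*s = append(*s, one)
		return nil
	}
	// The conversion to *[]string avoids re-entering this method.
	return json.Unmarshal(data, (*[]string)(s))
}

func main() {
	var a, b stopwords
	_ = json.Unmarshal([]byte(`"_english_"`), &a)
	_ = json.Unmarshal([]byte(`["foo", "bar"]`), &b)
	fmt.Println(a, b) // [_english_] [foo bar]
}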
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L277-L281 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L278-L282 type PatternCaptureTokenFilter struct { Patterns []string `json:"patterns"` PreserveOriginal *bool `json:"preserve_original,omitempty"` @@ -30,6 +40,55 @@ type PatternCaptureTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *PatternCaptureTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "patterns": + if err := dec.Decode(&s.Patterns); err != nil { + return err + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPatternCaptureTokenFilter returns a PatternCaptureTokenFilter. func NewPatternCaptureTokenFilter() *PatternCaptureTokenFilter { r := &PatternCaptureTokenFilter{} diff --git a/typedapi/types/patternreplacecharfilter.go b/typedapi/types/patternreplacecharfilter.go old mode 100755 new mode 100644 index 49ce204b49..3d9d41a7e7 --- a/typedapi/types/patternreplacecharfilter.go +++ b/typedapi/types/patternreplacecharfilter.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PatternReplaceCharFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/char_filters.ts#L53-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/char_filters.ts#L53-L58 type PatternReplaceCharFilter struct { Flags *string `json:"flags,omitempty"` Pattern string `json:"pattern"` @@ -31,6 +39,60 @@ type PatternReplaceCharFilter struct { Version *string `json:"version,omitempty"` } +func (s *PatternReplaceCharFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "flags": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flags = &o + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "replacement": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Replacement = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPatternReplaceCharFilter returns a PatternReplaceCharFilter. func NewPatternReplaceCharFilter() *PatternReplaceCharFilter { r := &PatternReplaceCharFilter{} diff --git a/typedapi/types/patternreplacetokenfilter.go b/typedapi/types/patternreplacetokenfilter.go old mode 100755 new mode 100644 index 01d258d6f6..e4163e0e72 --- a/typedapi/types/patternreplacetokenfilter.go +++ b/typedapi/types/patternreplacetokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PatternReplaceTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L283-L289 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L284-L290 type PatternReplaceTokenFilter struct { All *bool `json:"all,omitempty"` Flags *string `json:"flags,omitempty"` @@ -32,6 +42,74 @@ type PatternReplaceTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "all": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.All = &value + case bool: + s.All = &v + } + + case "flags": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flags = &o + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "replacement": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Replacement = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPatternReplaceTokenFilter returns a PatternReplaceTokenFilter. func NewPatternReplaceTokenFilter() *PatternReplaceTokenFilter { r := &PatternReplaceTokenFilter{} diff --git a/typedapi/types/patterntokenizer.go b/typedapi/types/patterntokenizer.go old mode 100755 new mode 100644 index 8529e73608..3a0b67dca2 --- a/typedapi/types/patterntokenizer.go +++ b/typedapi/types/patterntokenizer.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PatternTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L97-L102 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L97-L102 type PatternTokenizer struct { Flags string `json:"flags"` Group int `json:"group"` @@ -31,6 +41,68 @@ type PatternTokenizer struct { Version *string `json:"version,omitempty"` } +func (s *PatternTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "flags": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flags = o + + case "group": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Group = value + case float64: + f := int(v) + s.Group = f + } + + case "pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pattern = o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPatternTokenizer returns a PatternTokenizer. func NewPatternTokenizer() *PatternTokenizer { r := &PatternTokenizer{} diff --git a/typedapi/types/pendingtask.go b/typedapi/types/pendingtask.go old mode 100755 new mode 100644 index 8f1c2a81fe..1c78c2bd5e --- a/typedapi/types/pendingtask.go +++ b/typedapi/types/pendingtask.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PendingTask type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/pending_tasks/types.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/pending_tasks/types.ts#L23-L30 type PendingTask struct { Executing bool `json:"executing"` InsertOrder int `json:"insert_order"` @@ -32,6 +42,82 @@ type PendingTask struct { TimeInQueueMillis int64 `json:"time_in_queue_millis"` } +func (s *PendingTask) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "executing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Executing = value + case bool: + s.Executing = v + } + + case "insert_order": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InsertOrder = value + case float64: + f := int(v) + s.InsertOrder = f + } + + case "priority": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Priority = o + + case "source": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Source = o + + case "time_in_queue": + if err := dec.Decode(&s.TimeInQueue); err != nil { + return err + } + + case "time_in_queue_millis": + if err := dec.Decode(&s.TimeInQueueMillis); err != nil { + return err + } + + } + } + return nil +} + // NewPendingTask returns a PendingTask. func NewPendingTask() *PendingTask { r := &PendingTask{} diff --git a/typedapi/types/pendingtasksrecord.go b/typedapi/types/pendingtasksrecord.go old mode 100755 new mode 100644 index 7c7647a76f..0fef84c244 --- a/typedapi/types/pendingtasksrecord.go +++ b/typedapi/types/pendingtasksrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PendingTasksRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/pending_tasks/types.ts#L20-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/pending_tasks/types.ts#L20-L41 type PendingTasksRecord struct { // InsertOrder task insertion order InsertOrder *string `json:"insertOrder,omitempty"` diff --git a/typedapi/types/percentage.go b/typedapi/types/percentage.go old mode 100755 new mode 100644 index 90c3705057..1699da844d --- a/typedapi/types/percentage.go +++ b/typedapi/types/percentage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // float32 // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Numeric.ts#L28-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Numeric.ts#L28-L28 type Percentage interface{} diff --git a/typedapi/types/percentagescoreheuristic.go b/typedapi/types/percentagescoreheuristic.go old mode 100755 new mode 100644 index d98b32bcd4..17fe07979d --- a/typedapi/types/percentagescoreheuristic.go +++ b/typedapi/types/percentagescoreheuristic.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PercentageScoreHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L336-L336 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L336-L336 type PercentageScoreHeuristic struct { } diff --git a/typedapi/types/percentileranksaggregation.go b/typedapi/types/percentileranksaggregation.go old mode 100755 new mode 100644 index 6efad7bb22..6409c90f7d --- a/typedapi/types/percentileranksaggregation.go +++ b/typedapi/types/percentileranksaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PercentileRanksAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L105-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L105-L110 type PercentileRanksAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -34,6 +44,78 @@ type PercentileRanksAggregation struct { Values []Float64 `json:"values,omitempty"` } +func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "hdr": + if err := dec.Decode(&s.Hdr); err != nil { + return err + } + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "tdigest": + if err := dec.Decode(&s.Tdigest); err != nil { + return err + } + + case "values": + if err := dec.Decode(&s.Values); err != nil { + return err + } + + } + } + return nil +} + // NewPercentileRanksAggregation returns a PercentileRanksAggregation. func NewPercentileRanksAggregation() *PercentileRanksAggregation { r := &PercentileRanksAggregation{} diff --git a/typedapi/types/percentiles.go b/typedapi/types/percentiles.go old mode 100755 new mode 100644 index 42d6c2a296..89ebc6640b --- a/typedapi/types/percentiles.go +++ b/typedapi/types/percentiles.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Percentiles holds the union for the following types: // -// map[string]string +// KeyedPercentiles // []ArrayPercentilesItem // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L149-L150 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L150-L151 type Percentiles interface{} diff --git a/typedapi/types/percentilesaggregation.go b/typedapi/types/percentilesaggregation.go old mode 100755 new mode 100644 index acf46c96b9..e05aa8d61c --- a/typedapi/types/percentilesaggregation.go +++ b/typedapi/types/percentilesaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PercentilesAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L112-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L112-L117 type PercentilesAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -34,6 +44,78 @@ type PercentilesAggregation struct { Tdigest *TDigest `json:"tdigest,omitempty"` } +func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "hdr": + if err := dec.Decode(&s.Hdr); err != nil { + return err + } + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "percents": + if err := dec.Decode(&s.Percents); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "tdigest": + if err := dec.Decode(&s.Tdigest); err != nil { + return err + } + + } + } + return nil +} + // NewPercentilesAggregation returns a PercentilesAggregation. func NewPercentilesAggregation() *PercentilesAggregation { r := &PercentilesAggregation{} diff --git a/typedapi/types/percentilesbucketaggregate.go b/typedapi/types/percentilesbucketaggregate.go old mode 100755 new mode 100644 index dd262c7103..17b60c3c2c --- a/typedapi/types/percentilesbucketaggregate.go +++ b/typedapi/types/percentilesbucketaggregate.go @@ -16,20 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // PercentilesBucketAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L177-L178 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L178-L179 type PercentilesBucketAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Values Percentiles `json:"values"` + Meta Metadata `json:"meta,omitempty"` + Values Percentiles `json:"values"` +} + +func (s *PercentilesBucketAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "values": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(KeyedPercentiles, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + case '[': + o := []ArrayPercentilesItem{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + } + + } + } + return nil } // NewPercentilesBucketAggregate returns a PercentilesBucketAggregate. diff --git a/typedapi/types/percentilesbucketaggregation.go b/typedapi/types/percentilesbucketaggregation.go old mode 100755 new mode 100644 index 2a28dde299..69bcb2d0b9 --- a/typedapi/types/percentilesbucketaggregation.go +++ b/typedapi/types/percentilesbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,18 +32,19 @@ import ( // PercentilesBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L276-L278 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L276-L278 type PercentilesBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Percents []Float64 `json:"percents,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Percents []Float64 `json:"percents,omitempty"` } func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,9 +64,12 @@ func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -78,9 +82,12 @@ func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "percents": if err := dec.Decode(&s.Percents); err != nil { diff --git a/typedapi/types/percolatequery.go b/typedapi/types/percolatequery.go old mode 100755 new mode 100644 index 568efb21e2..d8e5e8bf7a --- a/typedapi/types/percolatequery.go +++ b/typedapi/types/percolatequery.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // PercolateQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L110-L120 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L110-L120 type PercolateQuery struct { Boost *float32 `json:"boost,omitempty"` Document json.RawMessage `json:"document,omitempty"` @@ -41,6 +47,101 @@ type PercolateQuery struct { Version *int64 `json:"version,omitempty"` } +func (s *PercolateQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "document": + if err := dec.Decode(&s.Document); err != nil { + return err + } + + case "documents": + if err := dec.Decode(&s.Documents); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "preference": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Preference = &o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPercolateQuery returns a PercolateQuery. func NewPercolateQuery() *PercolateQuery { r := &PercolateQuery{} diff --git a/typedapi/types/percolatorproperty.go b/typedapi/types/percolatorproperty.go old mode 100755 new mode 100644 index ff905f75c2..9f85663a40 --- a/typedapi/types/percolatorproperty.go +++ b/typedapi/types/percolatorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // PercolatorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L177-L179 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L177-L179 type PercolatorProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -44,6 +46,7 @@ type PercolatorProperty struct { } func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,6 +66,9 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -350,23 +356,40 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -654,7 +677,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/perpartitioncategorization.go b/typedapi/types/perpartitioncategorization.go old mode 100755 new mode 100644 index 55908591ce..59dd85aaef --- a/typedapi/types/perpartitioncategorization.go +++ b/typedapi/types/perpartitioncategorization.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PerPartitionCategorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Analysis.ts#L93-L102 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Analysis.ts#L93-L102 type PerPartitionCategorization struct { // Enabled To enable this setting, you must also set the `partition_field_name` property // to the same value in every detector that uses the keyword `mlcategory`. 
@@ -37,6 +47,54 @@ type PerPartitionCategorization struct { StopOnWarn *bool `json:"stop_on_warn,omitempty"` } +func (s *PerPartitionCategorization) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v + } + + case "stop_on_warn": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StopOnWarn = &value + case bool: + s.StopOnWarn = &v + } + + } + } + return nil +} + // NewPerPartitionCategorization returns a PerPartitionCategorization. func NewPerPartitionCategorization() *PerPartitionCategorization { r := &PerPartitionCategorization{} diff --git a/typedapi/types/persistenttaskstatus.go b/typedapi/types/persistenttaskstatus.go old mode 100755 new mode 100644 index 643ceb5ba8..c0ebad3cbf --- a/typedapi/types/persistenttaskstatus.go +++ b/typedapi/types/persistenttaskstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // PersistentTaskStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L56-L58 type PersistentTaskStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/phase.go b/typedapi/types/phase.go old mode 100755 new mode 100644 index ba1261278d..397d5e44b6 --- a/typedapi/types/phase.go +++ b/typedapi/types/phase.go @@ -16,19 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // Phase type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Phase.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Phase.ts#L25-L33 type Phase struct { Actions *IlmActions `json:"actions,omitempty"` Configurations *Configurations `json:"configurations,omitempty"` MinAge *Duration `json:"min_age,omitempty"` } +func (s *Phase) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "configurations": + if err := dec.Decode(&s.Configurations); err != nil { + return err + } + + case "min_age": + if err := dec.Decode(&s.MinAge); err != nil { + return err + } + + } + } + return nil +} + // NewPhase returns a Phase. func NewPhase() *Phase { r := &Phase{} diff --git a/typedapi/types/phases.go b/typedapi/types/phases.go old mode 100755 new mode 100644 index 671e94a393..9be3fa9119 --- a/typedapi/types/phases.go +++ b/typedapi/types/phases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Phases type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Phase.ts#L35-L41 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Phase.ts#L35-L41 type Phases struct { Cold *Phase `json:"cold,omitempty"` Delete *Phase `json:"delete,omitempty"` diff --git a/typedapi/types/phonetictokenfilter.go b/typedapi/types/phonetictokenfilter.go old mode 100755 new mode 100644 index d42a94db22..d4714067f3 --- a/typedapi/types/phonetictokenfilter.go +++ b/typedapi/types/phonetictokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,11 +25,19 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/phoneticlanguage" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/phoneticnametype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/phoneticruletype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // PhoneticTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/phonetic-plugin.ts#L64-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/phonetic-plugin.ts#L64-L72 type PhoneticTokenFilter struct { Encoder phoneticencoder.PhoneticEncoder `json:"encoder"` Languageset []phoneticlanguage.PhoneticLanguage `json:"languageset"` @@ -41,6 +49,86 @@ type PhoneticTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "encoder": + if err := dec.Decode(&s.Encoder); err != nil { + return err + } + + case "languageset": + if err := dec.Decode(&s.Languageset); err != nil { + return err + } + + case "max_code_len": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxCodeLen = &value + case float64: + f := int(v) + s.MaxCodeLen = &f + } + + case "name_type": + if err := dec.Decode(&s.NameType); err != nil { + return err + } + + case "replace": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Replace = &value + case bool: + s.Replace = &v + } + + case "rule_type": + if err := dec.Decode(&s.RuleType); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPhoneticTokenFilter returns a PhoneticTokenFilter. func NewPhoneticTokenFilter() *PhoneticTokenFilter { r := &PhoneticTokenFilter{} diff --git a/typedapi/types/phrasesuggest.go b/typedapi/types/phrasesuggest.go old mode 100755 new mode 100644 index 6f059b9990..4986770896 --- a/typedapi/types/phrasesuggest.go +++ b/typedapi/types/phrasesuggest.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PhraseSuggest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L57-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L57-L62 type PhraseSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -30,6 +40,82 @@ type PhraseSuggest struct { Text string `json:"text"` } +func (s *PhraseSuggest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Length = value + case float64: + f := int(v) + s.Length = f + } + + case "offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Offset = value + case float64: + f := int(v) + s.Offset = f + } + + case "options": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewPhraseSuggestOption() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Options = append(s.Options, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Options); err != nil { + return err + } + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = o + + } + } + return nil +} + // NewPhraseSuggest returns a PhraseSuggest. func NewPhraseSuggest() *PhraseSuggest { r := &PhraseSuggest{} diff --git a/typedapi/types/phrasesuggestcollate.go b/typedapi/types/phrasesuggestcollate.go old mode 100755 new mode 100644 index 4c24176f2d..3d3868923a --- a/typedapi/types/phrasesuggestcollate.go +++ b/typedapi/types/phrasesuggestcollate.go @@ -16,23 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // PhraseSuggestCollate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L180-L184 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L184-L188 type PhraseSuggestCollate struct { Params map[string]json.RawMessage `json:"params,omitempty"` Prune *bool `json:"prune,omitempty"` Query PhraseSuggestCollateQuery `json:"query"` } +func (s *PhraseSuggestCollate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "prune": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Prune = &value + case bool: + s.Prune = &v + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + } + } + return nil +} + // NewPhraseSuggestCollate returns a PhraseSuggestCollate. func NewPhraseSuggestCollate() *PhraseSuggestCollate { r := &PhraseSuggestCollate{ diff --git a/typedapi/types/phrasesuggestcollatequery.go b/typedapi/types/phrasesuggestcollatequery.go old mode 100755 new mode 100644 index 09481c5cac..a19bd7e788 --- a/typedapi/types/phrasesuggestcollatequery.go +++ b/typedapi/types/phrasesuggestcollatequery.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PhraseSuggestCollateQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L186-L189 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L190-L193 type PhraseSuggestCollateQuery struct { Id *string `json:"id,omitempty"` Source *string `json:"source,omitempty"` } +func (s *PhraseSuggestCollateQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "source": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Source = &o + + } + } + return nil +} + // NewPhraseSuggestCollateQuery returns a PhraseSuggestCollateQuery. func NewPhraseSuggestCollateQuery() *PhraseSuggestCollateQuery { r := &PhraseSuggestCollateQuery{} diff --git a/typedapi/types/phrasesuggester.go b/typedapi/types/phrasesuggester.go old mode 100755 new mode 100644 index a1fcbdc848..a122f16ada --- a/typedapi/types/phrasesuggester.go +++ b/typedapi/types/phrasesuggester.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PhraseSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L191-L205 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L195-L209 type PhraseSuggester struct { Analyzer *string `json:"analyzer,omitempty"` Collate *PhraseSuggestCollate `json:"collate,omitempty"` @@ -42,6 +52,201 @@ type PhraseSuggester struct { TokenLimit *int `json:"token_limit,omitempty"` } +func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "collate": + if err := dec.Decode(&s.Collate); err != nil { + return err + } + + case "confidence": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Confidence = &f + case float64: + f := Float64(v) + s.Confidence = &f + } + + case "direct_generator": + if err := dec.Decode(&s.DirectGenerator); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "force_unigrams": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ForceUnigrams = &value + case bool: + s.ForceUnigrams = &v + } + + case "gram_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.GramSize = &value + case float64: + f := int(v) + s.GramSize = &f + } + + case "highlight": + if err := dec.Decode(&s.Highlight); err != nil { + return err + } + + case "max_errors": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MaxErrors = &f + case float64: + f := Float64(v) + s.MaxErrors = &f + } + + case "real_word_error_likelihood": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RealWordErrorLikelihood = &f + case float64: + f := Float64(v) + s.RealWordErrorLikelihood = &f + } + + case "separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Separator = &o + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if 
err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "smoothing": + if err := dec.Decode(&s.Smoothing); err != nil { + return err + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = &o + + case "token_limit": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TokenLimit = &value + case float64: + f := int(v) + s.TokenLimit = &f + } + + } + } + return nil +} + // NewPhraseSuggester returns a PhraseSuggester. func NewPhraseSuggester() *PhraseSuggester { r := &PhraseSuggester{} diff --git a/typedapi/types/phrasesuggesthighlight.go b/typedapi/types/phrasesuggesthighlight.go old mode 100755 new mode 100644 index c1e07d19fd..8403794933 --- a/typedapi/types/phrasesuggesthighlight.go +++ b/typedapi/types/phrasesuggesthighlight.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PhraseSuggestHighlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L207-L210 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L211-L214 type PhraseSuggestHighlight struct { PostTag string `json:"post_tag"` PreTag string `json:"pre_tag"` diff --git a/typedapi/types/phrasesuggestoption.go b/typedapi/types/phrasesuggestoption.go old mode 100755 new mode 100644 index 22fea330ca..71a4c34fde --- a/typedapi/types/phrasesuggestoption.go +++ b/typedapi/types/phrasesuggestoption.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PhraseSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L86-L91 type PhraseSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Highlighted *string `json:"highlighted,omitempty"` @@ -30,6 +40,72 @@ type PhraseSuggestOption struct { Text string `json:"text"` } +func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collate_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CollateMatch = &value + case bool: + s.CollateMatch = &v + } + + case "highlighted": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Highlighted = &o + + case "score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score = f + case float64: + f := Float64(v) + s.Score = f + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = o + + } + } + return nil +} + // NewPhraseSuggestOption returns a PhraseSuggestOption. func NewPhraseSuggestOption() *PhraseSuggestOption { r := &PhraseSuggestOption{} diff --git a/typedapi/types/pinneddoc.go b/typedapi/types/pinneddoc.go old mode 100755 new mode 100644 index dec1bb634a..d039ea5dad --- a/typedapi/types/pinneddoc.go +++ b/typedapi/types/pinneddoc.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PinnedDoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L132-L135 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L132-L135 type PinnedDoc struct { Id_ string `json:"_id"` Index_ string `json:"_index"` } +func (s *PinnedDoc) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + } + } + return nil +} + // NewPinnedDoc returns a PinnedDoc. func NewPinnedDoc() *PinnedDoc { r := &PinnedDoc{} diff --git a/typedapi/types/pinnedquery.go b/typedapi/types/pinnedquery.go old mode 100755 new mode 100644 index 3f3f45b4eb..b7ef8d4318 --- a/typedapi/types/pinnedquery.go +++ b/typedapi/types/pinnedquery.go @@ -16,13 +16,23 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PinnedQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L122-L130 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L122-L130 type PinnedQuery struct { Boost *float32 `json:"boost,omitempty"` Docs []PinnedDoc `json:"docs,omitempty"` @@ -31,6 +41,65 @@ type PinnedQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *PinnedQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "docs": + if err := dec.Decode(&s.Docs); err != nil { + return err + } + + case "ids": + if err := dec.Decode(&s.Ids); err != nil { + return err + } + + case "organic": + if err := dec.Decode(&s.Organic); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewPinnedQuery returns a PinnedQuery. func NewPinnedQuery() *PinnedQuery { r := &PinnedQuery{} diff --git a/typedapi/types/pipelineconfig.go b/typedapi/types/pipelineconfig.go old mode 100755 new mode 100644 index e548ba6f91..61f778bc2b --- a/typedapi/types/pipelineconfig.go +++ b/typedapi/types/pipelineconfig.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PipelineConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Pipeline.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Pipeline.ts#L44-L48 type PipelineConfig struct { Description *string `json:"description,omitempty"` Processors []ProcessorContainer `json:"processors"` Version *int64 `json:"version,omitempty"` } +func (s *PipelineConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "processors": + if err := dec.Decode(&s.Processors); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPipelineConfig returns a PipelineConfig. func NewPipelineConfig() *PipelineConfig { r := &PipelineConfig{} diff --git a/typedapi/types/pipelinemetadata.go b/typedapi/types/pipelinemetadata.go old mode 100755 new mode 100644 index 032893ec4f..17a6a608a8 --- a/typedapi/types/pipelinemetadata.go +++ b/typedapi/types/pipelinemetadata.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PipelineMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/logstash/_types/Pipeline.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/logstash/_types/Pipeline.ts#L23-L26 type PipelineMetadata struct { Type string `json:"type"` Version string `json:"version"` diff --git a/typedapi/types/pipelineprocessor.go b/typedapi/types/pipelineprocessor.go old mode 100755 new mode 100644 index c39a5137c5..2753dc2083 --- a/typedapi/types/pipelineprocessor.go +++ b/typedapi/types/pipelineprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PipelineProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L306-L309 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L306-L309 type PipelineProcessor struct { Description *string `json:"description,omitempty"` If *string `json:"if,omitempty"` @@ -33,6 +43,88 @@ type PipelineProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing_pipeline": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissingPipeline = &value + case bool: + s.IgnoreMissingPipeline = &v + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewPipelineProcessor returns a PipelineProcessor. func NewPipelineProcessor() *PipelineProcessor { r := &PipelineProcessor{} diff --git a/typedapi/types/pipelinesettings.go b/typedapi/types/pipelinesettings.go old mode 100755 new mode 100644 index d0715cdf3c..cff9a9d928 --- a/typedapi/types/pipelinesettings.go +++ b/typedapi/types/pipelinesettings.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PipelineSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/logstash/_types/Pipeline.ts#L28-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/logstash/_types/Pipeline.ts#L28-L36 type PipelineSettings struct { PipelineBatchDelay int `json:"pipeline.batch.delay"` PipelineBatchSize int `json:"pipeline.batch.size"` @@ -33,6 +43,122 @@ type PipelineSettings struct { QueueType string `json:"queue.type"` } +func (s *PipelineSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "pipeline.batch.delay": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PipelineBatchDelay = value + case float64: + f := int(v) + s.PipelineBatchDelay = f + } + + case "pipeline.batch.size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PipelineBatchSize = value + case float64: + f := int(v) + s.PipelineBatchSize = f + } + + case "pipeline.workers": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PipelineWorkers = value + case float64: + f := int(v) + s.PipelineWorkers = f + } + + case "queue.checkpoint.writes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.QueueCheckpointWrites = value + case float64: + f := int(v) + s.QueueCheckpointWrites = f + } + + case "queue.max_bytes.number": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.QueueMaxBytesNumber = value + case float64: + f := int(v) + s.QueueMaxBytesNumber = f + } + + case "queue.max_bytes.units": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueueMaxBytesUnits = o + + case "queue.type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueueType = o + + } + } + return nil +} + // NewPipelineSettings returns a PipelineSettings. func NewPipelineSettings() *PipelineSettings { r := &PipelineSettings{} diff --git a/typedapi/types/pipelinesimulation.go b/typedapi/types/pipelinesimulation.go old mode 100755 new mode 100644 index c576547de7..a9b02492a6 --- a/typedapi/types/pipelinesimulation.go +++ b/typedapi/types/pipelinesimulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // PipelineSimulation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/simulate/types.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/simulate/types.ts#L33-L39 type PipelineSimulation struct { Doc *DocumentSimulation `json:"doc,omitempty"` ProcessorResults []PipelineSimulation `json:"processor_results,omitempty"` diff --git a/typedapi/types/pivot.go b/typedapi/types/pivot.go old mode 100755 new mode 100644 index 408c6102ef..f06170fbeb --- a/typedapi/types/pivot.go +++ b/typedapi/types/pivot.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Pivot type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L54-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L54-L68 type Pivot struct { // Aggregations Defines how to aggregate the grouped data. The following aggregations are // currently supported: average, bucket diff --git a/typedapi/types/pivotgroupbycontainer.go b/typedapi/types/pivotgroupbycontainer.go old mode 100755 new mode 100644 index 33968dd568..83c5756bb8 --- a/typedapi/types/pivotgroupbycontainer.go +++ b/typedapi/types/pivotgroupbycontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PivotGroupByContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L70-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L70-L78 type PivotGroupByContainer struct { DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` diff --git a/typedapi/types/pluginsrecord.go b/typedapi/types/pluginsrecord.go old mode 100755 new mode 100644 index a6b93d83c3..44bbb18ce0 --- a/typedapi/types/pluginsrecord.go +++ b/typedapi/types/pluginsrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PluginsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/plugins/types.ts#L22-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/plugins/types.ts#L22-L52 type PluginsRecord struct { // Component component Component *string `json:"component,omitempty"` @@ -38,6 +46,65 @@ type PluginsRecord struct { Version *string `json:"version,omitempty"` } +func (s *PluginsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "component", "c": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Component = &o + + case "description", "d": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "name", "n": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type", "t": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPluginsRecord returns a PluginsRecord. func NewPluginsRecord() *PluginsRecord { r := &PluginsRecord{} diff --git a/typedapi/types/pluginsstatus.go b/typedapi/types/pluginsstatus.go old mode 100755 new mode 100644 index 48333f2d57..9ecb95f789 --- a/typedapi/types/pluginsstatus.go +++ b/typedapi/types/pluginsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // PluginsStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L60-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L60-L62 type PluginsStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/pluginstats.go b/typedapi/types/pluginstats.go old mode 100755 new mode 100644 index 1c9a370a53..fb695875ba --- a/typedapi/types/pluginstats.go +++ b/typedapi/types/pluginstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PluginStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L138-L148 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L138-L148 type PluginStats struct { Classname string `json:"classname"` Description string `json:"description"` @@ -35,6 +45,95 @@ type PluginStats struct { Version string `json:"version"` } +func (s *PluginStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classname": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Classname = o + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "elasticsearch_version": + if err := dec.Decode(&s.ElasticsearchVersion); err != nil { + return err + } + + case "extended_plugins": + if err := dec.Decode(&s.ExtendedPlugins); err != nil { + return err + } + + case "has_native_controller": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.HasNativeController = value + case bool: + s.HasNativeController = v + } + + case "java_version": + if err := dec.Decode(&s.JavaVersion); err != nil { + return err + } + + case "licensed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Licensed = value + case bool: + s.Licensed = v + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPluginStats returns a PluginStats. func NewPluginStats() *PluginStats { r := &PluginStats{} diff --git a/typedapi/types/pointintimereference.go b/typedapi/types/pointintimereference.go old mode 100755 new mode 100644 index 11ba4cbc50..2a845fe05b --- a/typedapi/types/pointintimereference.go +++ b/typedapi/types/pointintimereference.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PointInTimeReference type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/PointInTimeReference.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/PointInTimeReference.ts#L23-L26 type PointInTimeReference struct { Id string `json:"id"` KeepAlive Duration `json:"keep_alive,omitempty"` } +func (s *PointInTimeReference) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "keep_alive": + if err := dec.Decode(&s.KeepAlive); err != nil { + return err + } + + } + } + return nil +} + // NewPointInTimeReference returns a PointInTimeReference. func NewPointInTimeReference() *PointInTimeReference { r := &PointInTimeReference{} diff --git a/typedapi/types/pointproperty.go b/typedapi/types/pointproperty.go old mode 100755 new mode 100644 index 275b2f1219..e290675fac --- a/typedapi/types/pointproperty.go +++ b/typedapi/types/pointproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // PointProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/geo.ts#L62-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/geo.ts#L62-L67 type PointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -51,6 +53,7 @@ type PointProperty struct { } func (s *PointProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,13 +68,33 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -80,6 +103,9 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -367,38 +393,76 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := 
localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "ignore_z_value": - if err := dec.Decode(&s.IgnoreZValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreZValue = &value + case bool: + s.IgnoreZValue = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.NullValue = &o case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -686,20 +750,32 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/pool.go b/typedapi/types/pool.go old mode 100755 new mode 100644 index 6ced9fcb45..f695264eca --- a/typedapi/types/pool.go +++ b/typedapi/types/pool.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Pool type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L345-L350 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L345-L350 type Pool struct { MaxInBytes *int64 `json:"max_in_bytes,omitempty"` PeakMaxInBytes *int64 `json:"peak_max_in_bytes,omitempty"` @@ -30,6 +40,86 @@ type Pool struct { UsedInBytes *int64 `json:"used_in_bytes,omitempty"` } +func (s *Pool) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxInBytes = &value + case float64: + f := int64(v) + s.MaxInBytes = &f + } + + case "peak_max_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PeakMaxInBytes = &value + case float64: + f := int64(v) + s.PeakMaxInBytes = &f + } + + case "peak_used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PeakUsedInBytes = &value + case float64: + f := int64(v) + s.PeakUsedInBytes = &f + } + + case "used_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UsedInBytes = &value + case float64: + f := int64(v) + s.UsedInBytes = &f + } + + } + } + return nil +} + // NewPool returns a Pool. func NewPool() *Pool { r := &Pool{} diff --git a/typedapi/types/porterstemtokenfilter.go b/typedapi/types/porterstemtokenfilter.go old mode 100755 new mode 100644 index f48dcf1b0d..3515b2160e --- a/typedapi/types/porterstemtokenfilter.go +++ b/typedapi/types/porterstemtokenfilter.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PorterStemTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L291-L293 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L292-L294 type PorterStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *PorterStemTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPorterStemTokenFilter returns a PorterStemTokenFilter. func NewPorterStemTokenFilter() *PorterStemTokenFilter { r := &PorterStemTokenFilter{} diff --git a/typedapi/types/postmigrationfeature.go b/typedapi/types/postmigrationfeature.go old mode 100755 new mode 100644 index d9d2685058..79796eedd4 --- a/typedapi/types/postmigrationfeature.go +++ b/typedapi/types/postmigrationfeature.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // PostMigrationFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L27-L29 type PostMigrationFeature struct { FeatureName string `json:"feature_name"` } diff --git a/typedapi/types/predicatetokenfilter.go b/typedapi/types/predicatetokenfilter.go old mode 100755 new mode 100644 index 1e134c6c25..acd62fb812 --- a/typedapi/types/predicatetokenfilter.go +++ b/typedapi/types/predicatetokenfilter.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // PredicateTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L295-L298 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L296-L299 type PredicateTokenFilter struct { Script Script `json:"script"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *PredicateTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewPredicateTokenFilter returns a PredicateTokenFilter. func NewPredicateTokenFilter() *PredicateTokenFilter { r := &PredicateTokenFilter{} diff --git a/typedapi/types/predictedvalue.go b/typedapi/types/predictedvalue.go old mode 100755 new mode 100644 index d404812dd8..de5d1fba8d --- a/typedapi/types/predictedvalue.go +++ b/typedapi/types/predictedvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,5 +27,5 @@ package types // bool // int // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L416-L416 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L416-L416 type PredictedValue interface{} diff --git a/typedapi/types/prefixquery.go b/typedapi/types/prefixquery.go old mode 100755 new mode 100644 index f550d2d1be..b90255892d --- a/typedapi/types/prefixquery.go +++ b/typedapi/types/prefixquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PrefixQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L57-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L57-L66 type PrefixQuery struct { Boost *float32 `json:"boost,omitempty"` CaseInsensitive *bool `json:"case_insensitive,omitempty"` @@ -31,6 +41,82 @@ type PrefixQuery struct { Value string `json:"value"` } +func (s *PrefixQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Value) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "case_insensitive": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CaseInsensitive = &value + case bool: + s.CaseInsensitive = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "rewrite": + if err := dec.Decode(&s.Rewrite); err != nil { + return err + } + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewPrefixQuery returns a PrefixQuery. func NewPrefixQuery() *PrefixQuery { r := &PrefixQuery{} diff --git a/typedapi/types/preprocessor.go b/typedapi/types/preprocessor.go old mode 100755 new mode 100644 index 3c085cf090..bafde41369 --- a/typedapi/types/preprocessor.go +++ b/typedapi/types/preprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Preprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L31-L36 type Preprocessor struct { FrequencyEncoding *FrequencyEncodingPreprocessor `json:"frequency_encoding,omitempty"` OneHotEncoding *OneHotEncodingPreprocessor `json:"one_hot_encoding,omitempty"` diff --git a/typedapi/types/pressurememory.go b/typedapi/types/pressurememory.go old mode 100755 new mode 100644 index 710ec54dd2..650de9afed --- a/typedapi/types/pressurememory.go +++ b/typedapi/types/pressurememory.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L66-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L66-L80 type PressureMemory struct { All ByteSize `json:"all,omitempty"` AllInBytes *int64 `json:"all_in_bytes,omitempty"` @@ -39,6 +49,171 @@ type PressureMemory struct { ReplicaRejections *int64 `json:"replica_rejections,omitempty"` } +func (s *PressureMemory) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "all": + if err := dec.Decode(&s.All); err != nil { + return err + } + + case "all_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AllInBytes = &value + case float64: + f := int64(v) + s.AllInBytes = &f + } + + case "combined_coordinating_and_primary": + if err := dec.Decode(&s.CombinedCoordinatingAndPrimary); err != nil { + return err + } + + case "combined_coordinating_and_primary_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CombinedCoordinatingAndPrimaryInBytes = &value + case float64: + f := int64(v) + s.CombinedCoordinatingAndPrimaryInBytes = &f + } + + case "coordinating": + if err := dec.Decode(&s.Coordinating); err != nil { + return err + } + + case "coordinating_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CoordinatingInBytes = &value + case float64: + f := int64(v) + s.CoordinatingInBytes = &f + } + + case "coordinating_rejections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CoordinatingRejections = &value + case float64: + f := int64(v) + s.CoordinatingRejections = &f + } + + case "primary": + if err := dec.Decode(&s.Primary); err != nil { + return err + } + + case "primary_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryInBytes = &value + case float64: + f := int64(v) + s.PrimaryInBytes = &f + } + + case "primary_rejections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryRejections = &value + case float64: + f := int64(v) + s.PrimaryRejections = &f + } + + case "replica": + if err := dec.Decode(&s.Replica); err != nil { + return err + } + + case "replica_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + 
s.ReplicaInBytes = &value + case float64: + f := int64(v) + s.ReplicaInBytes = &f + } + + case "replica_rejections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReplicaRejections = &value + case float64: + f := int64(v) + s.ReplicaRejections = &f + } + + } + } + return nil +} + // NewPressureMemory returns a PressureMemory. func NewPressureMemory() *PressureMemory { r := &PressureMemory{} diff --git a/typedapi/types/privileges.go b/typedapi/types/privileges.go old mode 100755 new mode 100644 index 2e34498221..ffb566475c --- a/typedapi/types/privileges.go +++ b/typedapi/types/privileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Privileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/types.ts#L48-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/types.ts#L48-L48 type Privileges map[string]bool diff --git a/typedapi/types/privilegesactions.go b/typedapi/types/privilegesactions.go old mode 100755 new mode 100644 index df4f9b162c..926f21e2c3 --- a/typedapi/types/privilegesactions.go +++ b/typedapi/types/privilegesactions.go @@ -16,22 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // PrivilegesActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/put_privileges/types.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/put_privileges/types.ts#L22-L27 type PrivilegesActions struct { - Actions []string `json:"actions"` - Application *string `json:"application,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - Name *string `json:"name,omitempty"` + Actions []string `json:"actions"` + Application *string `json:"application,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + Name *string `json:"name,omitempty"` +} + +func (s *PrivilegesActions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "application": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Application = &o + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil } // NewPrivilegesActions returns a PrivilegesActions. 
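The UnmarshalJSON methods added throughout this patch all follow one pattern: walk the JSON object with json.Decoder.Token and, for numeric and boolean fields, accept either the native JSON value or its quoted-string form before storing it in the struct. The sketch below is a minimal standalone illustration of that pattern on a hypothetical poolStats type; it is not code from the generated package, and only the technique is taken from the hunks above (error handling on the inner Decode is slightly tightened here).

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// poolStats is a hypothetical stand-in for generated types such as Pool or
// PluginStats: fields that Elasticsearch may emit as numbers/bools or as strings.
type poolStats struct {
	MaxInBytes *int64 `json:"max_in_bytes,omitempty"`
	Licensed   *bool  `json:"licensed,omitempty"`
}

// UnmarshalJSON walks the object token by token and coerces quoted values.
func (s *poolStats) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "max_in_bytes":
			// Accept 1024 or "1024".
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseInt(v, 10, 64)
				if err != nil {
					return err
				}
				s.MaxInBytes = &value
			case float64:
				f := int64(v)
				s.MaxInBytes = &f
			}
		case "licensed":
			// Accept true or "true".
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string:
				value, err := strconv.ParseBool(v)
				if err != nil {
					return err
				}
				s.Licensed = &value
			case bool:
				s.Licensed = &v
			}
		}
	}
	return nil
}

func main() {
	for _, doc := range []string{
		`{"max_in_bytes":"1024","licensed":"true"}`, // quoted values
		`{"max_in_bytes":1024,"licensed":true}`,     // native values
	} {
		var s poolStats
		if err := json.Unmarshal([]byte(doc), &s); err != nil {
			panic(err)
		}
		fmt.Println(*s.MaxInBytes, *s.Licensed) // 1024 true in both cases
	}
}

Both documents print 1024 true, which is the leniency the generated decoders above (Pool, PluginStats, PressureMemory, and the rest) add on top of plain struct tags.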
diff --git a/typedapi/types/privilegescheck.go b/typedapi/types/privilegescheck.go old mode 100755 new mode 100644 index 794f80d389..75a09108ef --- a/typedapi/types/privilegescheck.go +++ b/typedapi/types/privilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // PrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges_user_profile/types.ts#L30-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges_user_profile/types.ts#L30-L37 type PrivilegesCheck struct { Application []ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. diff --git a/typedapi/types/process.go b/typedapi/types/process.go old mode 100755 new mode 100644 index 1eceabf438..f2082d32f4 --- a/typedapi/types/process.go +++ b/typedapi/types/process.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Process type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L381-L387 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L381-L387 type Process struct { Cpu *Cpu `json:"cpu,omitempty"` MaxFileDescriptors *int `json:"max_file_descriptors,omitempty"` @@ -31,6 +41,83 @@ type Process struct { Timestamp *int64 `json:"timestamp,omitempty"` } +func (s *Process) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cpu": + if err := dec.Decode(&s.Cpu); err != nil { + return err + } + + case "max_file_descriptors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxFileDescriptors = &value + case float64: + f := int(v) + s.MaxFileDescriptors = &f + } + + case "mem": + if err := dec.Decode(&s.Mem); err != nil { + return err + } + + case "open_file_descriptors": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.OpenFileDescriptors = &value + case float64: + f := int(v) + s.OpenFileDescriptors = &f + } + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = &value + case float64: + f := int64(v) + s.Timestamp = &f + } + + } + } + return nil +} + // NewProcess returns a Process. 
func NewProcess() *Process { r := &Process{} diff --git a/typedapi/types/processor.go b/typedapi/types/processor.go old mode 100755 new mode 100644 index 1ab27b544f..27e94a75d7 --- a/typedapi/types/processor.go +++ b/typedapi/types/processor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Processor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L162-L167 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L162-L167 type Processor struct { Count *int64 `json:"count,omitempty"` Current *int64 `json:"current,omitempty"` @@ -30,6 +40,76 @@ type Processor struct { TimeInMillis *int64 `json:"time_in_millis,omitempty"` } +func (s *Processor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Current = &value + case float64: + f := int64(v) + s.Current = &f + } + + case "failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Failed = &value + case float64: + f := int64(v) + s.Failed = &f + } + + case "time_in_millis": + if err := dec.Decode(&s.TimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewProcessor returns a Processor. func NewProcessor() *Processor { r := &Processor{} diff --git a/typedapi/types/processorcontainer.go b/typedapi/types/processorcontainer.go old mode 100755 new mode 100644 index fa82a10532..a41db241a2 --- a/typedapi/types/processorcontainer.go +++ b/typedapi/types/processorcontainer.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ProcessorContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L28-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L28-L67 type ProcessorContainer struct { Append *AppendProcessor `json:"append,omitempty"` Attachment *AttachmentProcessor `json:"attachment,omitempty"` @@ -60,6 +68,196 @@ type ProcessorContainer struct { UserAgent *UserAgentProcessor `json:"user_agent,omitempty"` } +func (s *ProcessorContainer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "append": + if err := dec.Decode(&s.Append); err != nil { + return err + } + + case "attachment": + if err := dec.Decode(&s.Attachment); err != nil { + return err + } + + case "bytes": + if err := dec.Decode(&s.Bytes); err != nil { + return err + } + + case "circle": + if err := dec.Decode(&s.Circle); err != nil { + return err + } + + case "convert": + if err := dec.Decode(&s.Convert); err != nil { + return err + } + + case "csv": + if err := dec.Decode(&s.Csv); err != nil { + return err + } + + case "date": + if err := dec.Decode(&s.Date); err != nil { + return err + } + + case "date_index_name": + if err := dec.Decode(&s.DateIndexName); err != nil { + return err + } + + case "dissect": + if err := dec.Decode(&s.Dissect); err != nil { + return err + } + + case "dot_expander": + if err := dec.Decode(&s.DotExpander); err != nil { + return err + } + + case "drop": + if err := dec.Decode(&s.Drop); err != nil { + return err + } + + case "enrich": + if err := dec.Decode(&s.Enrich); err != nil { + return err + } + + case "fail": + if err := dec.Decode(&s.Fail); err != nil { + return err + } + + case "foreach": + if err := dec.Decode(&s.Foreach); err != nil { + return err + } + + case "geoip": + if err := dec.Decode(&s.Geoip); err != nil { + return err + } + + case "grok": + if err := dec.Decode(&s.Grok); err != nil { + return err + } + + case "gsub": + if err := dec.Decode(&s.Gsub); err != nil { + return err + } + + case "inference": + if err := dec.Decode(&s.Inference); err != nil { + return err + } + + case "join": + if err := dec.Decode(&s.Join); err != nil { + return err + } + + case "json": + if err := dec.Decode(&s.Json); err != nil { + return err + } + + case "kv": + if err := dec.Decode(&s.Kv); err != nil { + return err + } + + case "lowercase": + if err := dec.Decode(&s.Lowercase); err != nil { + return err + } + + case "pipeline": + if err := dec.Decode(&s.Pipeline); err != nil { + return err + } + + case "remove": + if err := dec.Decode(&s.Remove); err != nil { + return err + } + + case "rename": + if err := dec.Decode(&s.Rename); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "set": + if err := dec.Decode(&s.Set); err != nil { + return err + } + + case "set_security_user": + if err := dec.Decode(&s.SetSecurityUser); err != nil { + return err + } + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + case "split": + if err := dec.Decode(&s.Split); err != nil { + return err + } + + case "trim": + if err := dec.Decode(&s.Trim); err != nil { + return err + } + + case "uppercase": + if err := dec.Decode(&s.Uppercase); err != nil { + return err + } + + case "urldecode": + if err 
:= dec.Decode(&s.Urldecode); err != nil { + return err + } + + case "user_agent": + if err := dec.Decode(&s.UserAgent); err != nil { + return err + } + + } + } + return nil +} + // NewProcessorContainer returns a ProcessorContainer. func NewProcessorContainer() *ProcessorContainer { r := &ProcessorContainer{} diff --git a/typedapi/types/profile.go b/typedapi/types/profile.go old mode 100755 new mode 100644 index 8b4322d9f5..587bed79d5 --- a/typedapi/types/profile.go +++ b/typedapi/types/profile.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Profile type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L93-L95 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L93-L95 type Profile struct { Shards []ShardProfile `json:"shards"` } diff --git a/typedapi/types/property.go b/typedapi/types/property.go old mode 100755 new mode 100644 index b28f4c3ccf..fc273c88c1 --- a/typedapi/types/property.go +++ b/typedapi/types/property.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -69,5 +69,5 @@ package types // IpRangeProperty // LongRangeProperty // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/Property.ts#L93-L156 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/Property.ts#L93-L156 type Property interface{} diff --git a/typedapi/types/publishedclusterstates.go b/typedapi/types/publishedclusterstates.go old mode 100755 new mode 100644 index b5c1b530c2..2f9d76f443 --- a/typedapi/types/publishedclusterstates.go +++ b/typedapi/types/publishedclusterstates.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // PublishedClusterStates type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L120-L124 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L120-L124 type PublishedClusterStates struct { CompatibleDiffs *int64 `json:"compatible_diffs,omitempty"` FullStates *int64 `json:"full_states,omitempty"` IncompatibleDiffs *int64 `json:"incompatible_diffs,omitempty"` } +func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compatible_diffs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CompatibleDiffs = &value + case float64: + f := int64(v) + s.CompatibleDiffs = &f + } + + case "full_states": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FullStates = &value + case float64: + f := int64(v) + s.FullStates = &f + } + + case "incompatible_diffs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IncompatibleDiffs = &value + case float64: + f := int64(v) + s.IncompatibleDiffs = &f + } + + } + } + return nil +} + // NewPublishedClusterStates returns a PublishedClusterStates. func NewPublishedClusterStates() *PublishedClusterStates { r := &PublishedClusterStates{} diff --git a/typedapi/types/queries.go b/typedapi/types/queries.go old mode 100755 new mode 100644 index a44b8f20ec..d151c06ae5 --- a/typedapi/types/queries.go +++ b/typedapi/types/queries.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Queries type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L394-L396 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L394-L396 type Queries struct { Cache *CacheQueries `json:"cache,omitempty"` } diff --git a/typedapi/types/query.go b/typedapi/types/query.go old mode 100755 new mode 100644 index 532ff1bd32..67241d723d --- a/typedapi/types/query.go +++ b/typedapi/types/query.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Query type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L96-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L96-L162 type Query struct { Bool *BoolQuery `json:"bool,omitempty"` Boosting *BoostingQuery `json:"boosting,omitempty"` @@ -80,6 +88,338 @@ type Query struct { Wrapper *WrapperQuery `json:"wrapper,omitempty"` } +func (s *Query) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bool": + if err := dec.Decode(&s.Bool); err != nil { + return err + } + + case "boosting": + if err := dec.Decode(&s.Boosting); err != nil { + return err + } + + case "combined_fields": + if err := dec.Decode(&s.CombinedFields); err != nil { + return err + } + + case "common": + if s.Common == nil { + s.Common = make(map[string]CommonTermsQuery, 0) + } + if err := dec.Decode(&s.Common); err != nil { + return err + } + + case "constant_score": + if err := dec.Decode(&s.ConstantScore); err != nil { + return err + } + + case "dis_max": + if err := dec.Decode(&s.DisMax); err != nil { + return err + } + + case "distance_feature": + if err := dec.Decode(&s.DistanceFeature); err != nil { + return err + } + + case "exists": + if err := dec.Decode(&s.Exists); err != nil { + return err + } + + case "field_masking_span": + if err := dec.Decode(&s.FieldMaskingSpan); err != nil { + return err + } + + case "function_score": + if err := dec.Decode(&s.FunctionScore); err != nil { + return err + } + + case "fuzzy": + if s.Fuzzy == nil { + s.Fuzzy = make(map[string]FuzzyQuery, 0) + } + if err := dec.Decode(&s.Fuzzy); err != nil { + return err + } + + case "geo_bounding_box": + if err := dec.Decode(&s.GeoBoundingBox); err != nil { + return err + } + + case "geo_distance": + if err := dec.Decode(&s.GeoDistance); err != nil { + return err + } + + case "geo_polygon": + if err := dec.Decode(&s.GeoPolygon); err != nil { + return err + } + + case "geo_shape": + if err := dec.Decode(&s.GeoShape); err != nil { + return err + } + + case "has_child": + if err := dec.Decode(&s.HasChild); err != nil { + return err + } + + case "has_parent": + if err := dec.Decode(&s.HasParent); err != nil { + return err + } + + case "ids": + if err := dec.Decode(&s.Ids); err != nil { + return err + } + + case "intervals": + if s.Intervals == nil { + s.Intervals = make(map[string]IntervalsQuery, 0) + } + if err := dec.Decode(&s.Intervals); err != nil { + return err + } + + case "match": + if s.Match == nil { + s.Match = make(map[string]MatchQuery, 0) + } + if err := dec.Decode(&s.Match); err != nil { + return err + } + + case "match_all": + if err := dec.Decode(&s.MatchAll); err != nil { + return err + } + + case "match_bool_prefix": + if s.MatchBoolPrefix == nil { + s.MatchBoolPrefix = make(map[string]MatchBoolPrefixQuery, 0) + } + if err := dec.Decode(&s.MatchBoolPrefix); err != nil { + return err + } + + case "match_none": + if err := dec.Decode(&s.MatchNone); err != nil { + return err + } + + case "match_phrase": + if s.MatchPhrase == nil { + s.MatchPhrase = make(map[string]MatchPhraseQuery, 0) + } + if err := dec.Decode(&s.MatchPhrase); err != nil { + return err + } + + case "match_phrase_prefix": + if s.MatchPhrasePrefix == nil { + s.MatchPhrasePrefix = 
make(map[string]MatchPhrasePrefixQuery, 0) + } + if err := dec.Decode(&s.MatchPhrasePrefix); err != nil { + return err + } + + case "more_like_this": + if err := dec.Decode(&s.MoreLikeThis); err != nil { + return err + } + + case "multi_match": + if err := dec.Decode(&s.MultiMatch); err != nil { + return err + } + + case "nested": + if err := dec.Decode(&s.Nested); err != nil { + return err + } + + case "parent_id": + if err := dec.Decode(&s.ParentId); err != nil { + return err + } + + case "percolate": + if err := dec.Decode(&s.Percolate); err != nil { + return err + } + + case "pinned": + if err := dec.Decode(&s.Pinned); err != nil { + return err + } + + case "prefix": + if s.Prefix == nil { + s.Prefix = make(map[string]PrefixQuery, 0) + } + if err := dec.Decode(&s.Prefix); err != nil { + return err + } + + case "query_string": + if err := dec.Decode(&s.QueryString); err != nil { + return err + } + + case "range": + if s.Range == nil { + s.Range = make(map[string]RangeQuery, 0) + } + if err := dec.Decode(&s.Range); err != nil { + return err + } + + case "rank_feature": + if err := dec.Decode(&s.RankFeature); err != nil { + return err + } + + case "regexp": + if s.Regexp == nil { + s.Regexp = make(map[string]RegexpQuery, 0) + } + if err := dec.Decode(&s.Regexp); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "script_score": + if err := dec.Decode(&s.ScriptScore); err != nil { + return err + } + + case "shape": + if err := dec.Decode(&s.Shape); err != nil { + return err + } + + case "simple_query_string": + if err := dec.Decode(&s.SimpleQueryString); err != nil { + return err + } + + case "span_containing": + if err := dec.Decode(&s.SpanContaining); err != nil { + return err + } + + case "span_first": + if err := dec.Decode(&s.SpanFirst); err != nil { + return err + } + + case "span_multi": + if err := dec.Decode(&s.SpanMulti); err != nil { + return err + } + + case "span_near": + if err := dec.Decode(&s.SpanNear); err != nil { + return err + } + + case "span_not": + if err := dec.Decode(&s.SpanNot); err != nil { + return err + } + + case "span_or": + if err := dec.Decode(&s.SpanOr); err != nil { + return err + } + + case "span_term": + if s.SpanTerm == nil { + s.SpanTerm = make(map[string]SpanTermQuery, 0) + } + if err := dec.Decode(&s.SpanTerm); err != nil { + return err + } + + case "span_within": + if err := dec.Decode(&s.SpanWithin); err != nil { + return err + } + + case "term": + if s.Term == nil { + s.Term = make(map[string]TermQuery, 0) + } + if err := dec.Decode(&s.Term); err != nil { + return err + } + + case "terms": + if err := dec.Decode(&s.Terms); err != nil { + return err + } + + case "terms_set": + if s.TermsSet == nil { + s.TermsSet = make(map[string]TermsSetQuery, 0) + } + if err := dec.Decode(&s.TermsSet); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "wildcard": + if s.Wildcard == nil { + s.Wildcard = make(map[string]WildcardQuery, 0) + } + if err := dec.Decode(&s.Wildcard); err != nil { + return err + } + + case "wrapper": + if err := dec.Decode(&s.Wrapper); err != nil { + return err + } + + } + } + return nil +} + // NewQuery returns a Query. 
func NewQuery() *Query { r := &Query{ diff --git a/typedapi/types/querybreakdown.go b/typedapi/types/querybreakdown.go old mode 100755 new mode 100644 index 3882bd6566..25977ebd55 --- a/typedapi/types/querybreakdown.go +++ b/typedapi/types/querybreakdown.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // QueryBreakdown type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L97-L116 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L97-L116 type QueryBreakdown struct { Advance int64 `json:"advance"` AdvanceCount int64 `json:"advance_count"` @@ -44,6 +54,296 @@ type QueryBreakdown struct { ShallowAdvanceCount int64 `json:"shallow_advance_count"` } +func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "advance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Advance = value + case float64: + f := int64(v) + s.Advance = f + } + + case "advance_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AdvanceCount = value + case float64: + f := int64(v) + s.AdvanceCount = f + } + + case "build_scorer": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BuildScorer = value + case float64: + f := int64(v) + s.BuildScorer = f + } + + case "build_scorer_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BuildScorerCount = value + case float64: + f := int64(v) + s.BuildScorerCount = f + } + + case "compute_max_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ComputeMaxScore = value + case float64: + f := int64(v) + s.ComputeMaxScore = f + } + + case "compute_max_score_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ComputeMaxScoreCount = value + case float64: + f := int64(v) + s.ComputeMaxScoreCount = f + } + + case "create_weight": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CreateWeight = value + case float64: + f := int64(v) + s.CreateWeight = f + } + + case "create_weight_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err 
!= nil { + return err + } + s.CreateWeightCount = value + case float64: + f := int64(v) + s.CreateWeightCount = f + } + + case "match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Match = value + case float64: + f := int64(v) + s.Match = f + } + + case "match_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MatchCount = value + case float64: + f := int64(v) + s.MatchCount = f + } + + case "next_doc": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NextDoc = value + case float64: + f := int64(v) + s.NextDoc = f + } + + case "next_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NextDocCount = value + case float64: + f := int64(v) + s.NextDocCount = f + } + + case "score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Score = value + case float64: + f := int64(v) + s.Score = f + } + + case "score_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ScoreCount = value + case float64: + f := int64(v) + s.ScoreCount = f + } + + case "set_min_competitive_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SetMinCompetitiveScore = value + case float64: + f := int64(v) + s.SetMinCompetitiveScore = f + } + + case "set_min_competitive_score_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SetMinCompetitiveScoreCount = value + case float64: + f := int64(v) + s.SetMinCompetitiveScoreCount = f + } + + case "shallow_advance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShallowAdvance = value + case float64: + f := int64(v) + s.ShallowAdvance = f + } + + case "shallow_advance_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShallowAdvanceCount = value + case float64: + f := int64(v) + s.ShallowAdvanceCount = f + } + + } + } + return nil +} + // NewQueryBreakdown returns a QueryBreakdown. func NewQueryBreakdown() *QueryBreakdown { r := &QueryBreakdown{} diff --git a/typedapi/types/querycachestats.go b/typedapi/types/querycachestats.go old mode 100755 new mode 100644 index 6f836cc761..3c77e0cffb --- a/typedapi/types/querycachestats.go +++ b/typedapi/types/querycachestats.go @@ -16,24 +16,170 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
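The QueryBreakdown and QueryCacheStats hunks above all follow the same lenient numeric decoding: each counter field accepts either a JSON number or a quoted numeric string, decoded via an interface{} temporary and a type switch. A standalone sketch of that coercion pattern, using a hypothetical type and field name (not part of the generated API):

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"strconv"
)

// counter mirrors the generated pattern: an int64 field that tolerates
// both 42 and "42" on the wire.
type counter struct {
	Advance int64 `json:"advance"`
}

func (c *counter) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		if t == "advance" {
			var tmp interface{}
			if err := dec.Decode(&tmp); err != nil {
				return err
			}
			switch v := tmp.(type) {
			case string: // "42"
				n, err := strconv.ParseInt(v, 10, 64)
				if err != nil {
					return err
				}
				c.Advance = n
			case float64: // 42 (encoding/json's default numeric type)
				c.Advance = int64(v)
			}
		}
	}
	return nil
}

func main() {
	for _, payload := range []string{`{"advance":42}`, `{"advance":"42"}`} {
		var c counter
		if err := json.Unmarshal([]byte(payload), &c); err != nil {
			panic(err)
		}
		fmt.Println(c.Advance) // 42 both times
	}
}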
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // QueryCacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L150-L159 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L150-L159 type QueryCacheStats struct { CacheCount int `json:"cache_count"` CacheSize int `json:"cache_size"` Evictions int `json:"evictions"` HitCount int `json:"hit_count"` MemorySize ByteSize `json:"memory_size,omitempty"` - MemorySizeInBytes int `json:"memory_size_in_bytes"` + MemorySizeInBytes int64 `json:"memory_size_in_bytes"` MissCount int `json:"miss_count"` TotalCount int `json:"total_count"` } +func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CacheCount = value + case float64: + f := int(v) + s.CacheCount = f + } + + case "cache_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CacheSize = value + case float64: + f := int(v) + s.CacheSize = f + } + + case "evictions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Evictions = value + case float64: + f := int(v) + s.Evictions = f + } + + case "hit_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.HitCount = value + case float64: + f := int(v) + s.HitCount = f + } + + case "memory_size": + if err := dec.Decode(&s.MemorySize); err != nil { + return err + } + + case "memory_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemorySizeInBytes = value + case float64: + f := int64(v) + s.MemorySizeInBytes = f + } + + case "miss_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MissCount = value + case float64: + f := int(v) + s.MissCount = f + } + + case "total_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TotalCount = value + case float64: + f := int(v) + s.TotalCount = f + } + + } + } + return nil +} + // NewQueryCacheStats returns a QueryCacheStats. func NewQueryCacheStats() *QueryCacheStats { r := &QueryCacheStats{} diff --git a/typedapi/types/queryprofile.go b/typedapi/types/queryprofile.go old mode 100755 new mode 100644 index e55b9f7fd4..c89d87439a --- a/typedapi/types/queryprofile.go +++ b/typedapi/types/queryprofile.go @@ -16,13 +16,21 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // QueryProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L118-L124 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L118-L124 type QueryProfile struct { Breakdown QueryBreakdown `json:"breakdown"` Children []QueryProfile `json:"children,omitempty"` @@ -31,6 +39,57 @@ type QueryProfile struct { Type string `json:"type"` } +func (s *QueryProfile) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "breakdown": + if err := dec.Decode(&s.Breakdown); err != nil { + return err + } + + case "children": + if err := dec.Decode(&s.Children); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "time_in_nanos": + if err := dec.Decode(&s.TimeInNanos); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewQueryProfile returns a QueryProfile. func NewQueryProfile() *QueryProfile { r := &QueryProfile{} diff --git a/typedapi/types/querystringquery.go b/typedapi/types/querystringquery.go old mode 100755 new mode 100644 index 7aef512e2d..93c56f27d5 --- a/typedapi/types/querystringquery.go +++ b/typedapi/types/querystringquery.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/operator" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/textquerytype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // QueryStringQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L233-L269 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L233-L269 type QueryStringQuery struct { AllowLeadingWildcard *bool `json:"allow_leading_wildcard,omitempty"` AnalyzeWildcard *bool `json:"analyze_wildcard,omitempty"` @@ -58,6 +66,305 @@ type QueryStringQuery struct { Type *textquerytype.TextQueryType `json:"type,omitempty"` } +func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_leading_wildcard": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowLeadingWildcard = &value + case bool: + s.AllowLeadingWildcard = &v + } + + case "analyze_wildcard": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AnalyzeWildcard = &value + case bool: + s.AnalyzeWildcard = &v + } + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "auto_generate_synonyms_phrase_query": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AutoGenerateSynonymsPhraseQuery = &value + case bool: + s.AutoGenerateSynonymsPhraseQuery = &v + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "default_field": + if err := dec.Decode(&s.DefaultField); err != nil { + return err + } + + case "default_operator": + if err := dec.Decode(&s.DefaultOperator); err != nil { + return err + } + + case "enable_position_increments": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EnablePositionIncrements = &value + case bool: + s.EnablePositionIncrements = &v + } + + case "escape": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Escape = &value + case bool: + s.Escape = &v + } + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "fuzzy_max_expansions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FuzzyMaxExpansions = &value + case float64: + f := int(v) + s.FuzzyMaxExpansions = &f + } + + case "fuzzy_prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FuzzyPrefixLength = &value + case float64: + f := int(v) + s.FuzzyPrefixLength = &f + } + + case "fuzzy_rewrite": + if err := 
dec.Decode(&s.FuzzyRewrite); err != nil { + return err + } + + case "fuzzy_transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FuzzyTranspositions = &value + case bool: + s.FuzzyTranspositions = &v + } + + case "lenient": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lenient = &value + case bool: + s.Lenient = &v + } + + case "max_determinized_states": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDeterminizedStates = &value + case float64: + f := int(v) + s.MaxDeterminizedStates = &f + } + + case "minimum_should_match": + if err := dec.Decode(&s.MinimumShouldMatch); err != nil { + return err + } + + case "phrase_slop": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.PhraseSlop = &f + case float64: + f := Float64(v) + s.PhraseSlop = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "quote_analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QuoteAnalyzer = &o + + case "quote_field_suffix": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QuoteFieldSuffix = &o + + case "rewrite": + if err := dec.Decode(&s.Rewrite); err != nil { + return err + } + + case "tie_breaker": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.TieBreaker = &f + case float64: + f := Float64(v) + s.TieBreaker = &f + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewQueryStringQuery returns a QueryStringQuery. func NewQueryStringQuery() *QueryStringQuery { r := &QueryStringQuery{} diff --git a/typedapi/types/queryvector.go b/typedapi/types/queryvector.go old mode 100755 new mode 100644 index 74bbcba496..d8a9b72530 --- a/typedapi/types/queryvector.go +++ b/typedapi/types/queryvector.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // QueryVector type alias. 
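The QueryStringQuery hunk above extends the same tolerance to booleans and floats: flags such as lenient accept true or "true", boost narrows to *float32, and phrase_slop / tie_breaker use the package's Float64 alias, with all optional scalars stored behind pointers. A short usage sketch, assuming the generated types package from this diff; the payload is illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Mixed wire representations: a quoted bool and a plain number, both
	// accepted by the generated QueryStringQuery.UnmarshalJSON above.
	raw := []byte(`{"query":"user:kimchy","lenient":"true","boost":2}`)

	var q types.QueryStringQuery
	if err := json.Unmarshal(raw, &q); err != nil {
		panic(err)
	}
	fmt.Println(*q.Lenient, *q.Boost) // true 2
}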
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Knn.ts#L24-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Knn.ts#L24-L24 type QueryVector []float32 diff --git a/typedapi/types/queryvectorbuilder.go b/typedapi/types/queryvectorbuilder.go old mode 100755 new mode 100644 index 8c62b1c387..1f40cf9e93 --- a/typedapi/types/queryvectorbuilder.go +++ b/typedapi/types/queryvectorbuilder.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // QueryVectorBuilder type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Knn.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Knn.ts#L43-L46 type QueryVectorBuilder struct { TextEmbedding *TextEmbedding `json:"text_embedding,omitempty"` } diff --git a/typedapi/types/querywatch.go b/typedapi/types/querywatch.go old mode 100755 new mode 100644 index 12c6be96df..28c534fbdd --- a/typedapi/types/querywatch.go +++ b/typedapi/types/querywatch.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // QueryWatch type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Watch.ts#L58-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Watch.ts#L58-L64 type QueryWatch struct { Id_ string `json:"_id"` PrimaryTerm_ *int `json:"_primary_term,omitempty"` @@ -31,6 +41,62 @@ type QueryWatch struct { Watch *Watch `json:"watch,omitempty"` } +func (s *QueryWatch) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_primary_term": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrimaryTerm_ = &value + case float64: + f := int(v) + s.PrimaryTerm_ = &f + } + + case "_seq_no": + if err := dec.Decode(&s.SeqNo_); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "watch": + if err := dec.Decode(&s.Watch); err != nil { + return err + } + + } + } + return nil +} + // NewQueryWatch returns a QueryWatch. 
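In the QueryWatch hunk above, optional response fields such as _primary_term stay pointers, so a consumer can distinguish "absent" from a zero value after the custom unmarshalling runs. A small sketch of that check, assuming the QueryWatch shape shown in the hunk; the document content is illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var w types.QueryWatch
	// _primary_term arrives as a string here and is still coerced to *int.
	if err := json.Unmarshal([]byte(`{"_id":"my-watch","_primary_term":"3"}`), &w); err != nil {
		panic(err)
	}
	if w.PrimaryTerm_ != nil {
		fmt.Println("primary term:", *w.PrimaryTerm_) // primary term: 3
	} else {
		fmt.Println("primary term not returned")
	}
}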
func NewQueryWatch() *QueryWatch { r := &QueryWatch{} diff --git a/typedapi/types/questionansweringinferenceoptions.go b/typedapi/types/questionansweringinferenceoptions.go old mode 100755 new mode 100644 index ba2f391572..84c35c90e7 --- a/typedapi/types/questionansweringinferenceoptions.go +++ b/typedapi/types/questionansweringinferenceoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // QuestionAnsweringInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L251-L261 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L251-L261 type QuestionAnsweringInferenceOptions struct { // MaxAnswerLength The maximum answer length to consider MaxAnswerLength *int `json:"max_answer_length,omitempty"` @@ -35,6 +45,71 @@ type QuestionAnsweringInferenceOptions struct { Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` } +func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_answer_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAnswerLength = &value + case float64: + f := int(v) + s.MaxAnswerLength = &f + } + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewQuestionAnsweringInferenceOptions returns a QuestionAnsweringInferenceOptions. func NewQuestionAnsweringInferenceOptions() *QuestionAnsweringInferenceOptions { r := &QuestionAnsweringInferenceOptions{} diff --git a/typedapi/types/questionansweringinferenceupdateoptions.go b/typedapi/types/questionansweringinferenceupdateoptions.go old mode 100755 new mode 100644 index 662c71f072..ece04d7d0e --- a/typedapi/types/questionansweringinferenceupdateoptions.go +++ b/typedapi/types/questionansweringinferenceupdateoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // QuestionAnsweringInferenceUpdateOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L379-L390 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L379-L390 type QuestionAnsweringInferenceUpdateOptions struct { // MaxAnswerLength The maximum answer length to consider for extraction MaxAnswerLength *int `json:"max_answer_length,omitempty"` @@ -37,6 +47,79 @@ type QuestionAnsweringInferenceUpdateOptions struct { Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` } +func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_answer_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAnswerLength = &value + case float64: + f := int(v) + s.MaxAnswerLength = &f + } + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "question": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Question = o + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewQuestionAnsweringInferenceUpdateOptions returns a QuestionAnsweringInferenceUpdateOptions. func NewQuestionAnsweringInferenceUpdateOptions() *QuestionAnsweringInferenceUpdateOptions { r := &QuestionAnsweringInferenceUpdateOptions{} diff --git a/typedapi/types/randomscorefunction.go b/typedapi/types/randomscorefunction.go old mode 100755 new mode 100644 index aa9d44e210..265ab8b263 --- a/typedapi/types/randomscorefunction.go +++ b/typedapi/types/randomscorefunction.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RandomScoreFunction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L65-L68 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L65-L68 type RandomScoreFunction struct { Field *string `json:"field,omitempty"` Seed string `json:"seed,omitempty"` } +func (s *RandomScoreFunction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "seed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Seed = o + + } + } + return nil +} + // NewRandomScoreFunction returns a RandomScoreFunction. func NewRandomScoreFunction() *RandomScoreFunction { r := &RandomScoreFunction{} diff --git a/typedapi/types/rangeaggregate.go b/typedapi/types/rangeaggregate.go old mode 100755 new mode 100644 index 6795c60443..f8610a14bc --- a/typedapi/types/rangeaggregate.go +++ b/typedapi/types/rangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // RangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L530-L531 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L531-L532 type RangeAggregate struct { - Buckets BucketsRangeBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsRangeBucket `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *RangeAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *RangeAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]RangeBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []RangeBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/rangeaggregation.go b/typedapi/types/rangeaggregation.go old mode 100755 new mode 100644 index 4c00c65ff4..1ae5047feb --- a/typedapi/types/rangeaggregation.go +++ b/typedapi/types/rangeaggregation.go @@ -16,26 +16,118 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // RangeAggregation type. 
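The RangeAggregate hunk above decodes its Buckets union by peeking at the first byte of the raw payload: a keyed object (requested with "keyed": true) becomes a map, a plain array becomes a slice, and both decode errors are now propagated instead of ignored. A standalone sketch of that object-or-array dispatch, using a hypothetical trimmed bucket shape rather than the generated types.RangeBucket:

package main

import (
	"encoding/json"
	"fmt"
)

// rangeBucket is a hypothetical, trimmed-down bucket used only for this sketch.
type rangeBucket struct {
	DocCount int64    `json:"doc_count"`
	From     *float64 `json:"from,omitempty"`
	To       *float64 `json:"to,omitempty"`
}

// decodeBuckets mirrors the BucketsRangeBucket handling above: the first byte
// of the raw message decides the Go shape. Like the generated code, this
// assumes the payload has no leading whitespace.
func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	switch raw[0] {
	case '{':
		m := make(map[string]rangeBucket)
		err := json.Unmarshal(raw, &m)
		return m, err
	case '[':
		var s []rangeBucket
		err := json.Unmarshal(raw, &s)
		return s, err
	default:
		return nil, fmt.Errorf("unexpected buckets payload: %s", raw)
	}
}

func main() {
	keyed, _ := decodeBuckets(json.RawMessage(`{"cheap":{"doc_count":2,"to":100}}`))
	listed, _ := decodeBuckets(json.RawMessage(`[{"doc_count":2,"to":100}]`))
	fmt.Printf("%T %T\n", keyed, listed) // map[string]main.rangeBucket []main.rangeBucket
}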
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L289-L296 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L289-L296 type RangeAggregation struct { - Field *string `json:"field,omitempty"` - Format *string `json:"format,omitempty"` - Keyed *bool `json:"keyed,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Missing *int `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` - Ranges []AggregationRange `json:"ranges,omitempty"` - Script Script `json:"script,omitempty"` + Field *string `json:"field,omitempty"` + Format *string `json:"format,omitempty"` + Keyed *bool `json:"keyed,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing *int `json:"missing,omitempty"` + Name *string `json:"name,omitempty"` + Ranges []AggregationRange `json:"ranges,omitempty"` + Script Script `json:"script,omitempty"` +} + +func (s *RangeAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "keyed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Keyed = &value + case bool: + s.Keyed = &v + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "missing": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Missing = &value + case float64: + f := int(v) + s.Missing = &f + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "ranges": + if err := dec.Decode(&s.Ranges); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil } // NewRangeAggregation returns a RangeAggregation. diff --git a/typedapi/types/rangebucket.go b/typedapi/types/rangebucket.go old mode 100755 new mode 100644 index 08c04a486b..3a4bd797e6 --- a/typedapi/types/rangebucket.go +++ b/typedapi/types/rangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // RangeBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L533-L540 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L534-L541 type RangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -47,6 +49,7 @@ type RangeBucket struct { } func (s *RangeBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -60,477 +63,589 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } 
- s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - 
return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err 
!= nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "from": - if err := dec.Decode(&s.From); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.From = &f + case float64: + f := Float64(v) + s.From = &f } case "from_as_string": - if err := dec.Decode(&s.FromAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.FromAsString = &o case "key": - if err := dec.Decode(&s.Key); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Key = &o case "to": - if err := dec.Decode(&s.To); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.To = &f + case float64: + f := Float64(v) + s.To = &f } case "to_as_string": - if err := dec.Decode(&s.ToAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.ToAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case 
"sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := 
NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -556,6 +671,7 @@ func (s RangeBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/rangequery.go b/typedapi/types/rangequery.go old mode 100755 new mode 100644 index a7cea342e7..a5ba323d03 --- a/typedapi/types/rangequery.go +++ b/typedapi/types/rangequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // DateRangeQuery // NumberRangeQuery // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L92-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L92-L94 type RangeQuery interface{} diff --git a/typedapi/types/rankevalhit.go b/typedapi/types/rankevalhit.go old mode 100755 new mode 100644 index d8926b2cb5..4a70892b78 --- a/typedapi/types/rankevalhit.go +++ b/typedapi/types/rankevalhit.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
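The switch above dispatches on the aggregate type prefix that Elasticsearch prepends to each aggregation name when the request asks for typed_keys (for example "avg#price_avg"); elems[1] is the user-supplied name under which the decoded aggregate is stored, and unknown prefixes fall back to a plain map. A minimal, self-contained sketch of the same dispatch idea, using a made-up response body and anonymous structs rather than the generated aggregate types:

// Hypothetical, self-contained illustration of typed_keys dispatch; the
// response body, struct shapes and printed output are assumptions, not
// the generated client code.
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	raw := []byte(`{"aggregations":{"avg#price_avg":{"value":42.5},"sterms#tags":{"buckets":[]}}}`)

	var resp struct {
		Aggregations map[string]json.RawMessage `json:"aggregations"`
	}
	if err := json.Unmarshal(raw, &resp); err != nil {
		panic(err)
	}

	decoded := map[string]interface{}{}
	for key, msg := range resp.Aggregations {
		elems := strings.SplitN(key, "#", 2) // elems[0] = aggregate type, elems[1] = aggregation name
		switch elems[0] {
		case "avg":
			var o struct {
				Value float64 `json:"value"`
			}
			if err := json.Unmarshal(msg, &o); err != nil {
				panic(err)
			}
			decoded[elems[1]] = o
		default:
			// Unknown types fall back to a generic map, mirroring the default branch above.
			var o map[string]interface{}
			if err := json.Unmarshal(msg, &o); err != nil {
				panic(err)
			}
			decoded[elems[1]] = o
		}
	}
	fmt.Printf("%+v\n", decoded)
}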
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalHit type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L141-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L141-L145 type RankEvalHit struct { Id_ string `json:"_id"` Index_ string `json:"_index"` Score_ Float64 `json:"_score"` } +func (s *RankEvalHit) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score_ = f + case float64: + f := Float64(v) + s.Score_ = f + } + + } + } + return nil +} + // NewRankEvalHit returns a RankEvalHit. func NewRankEvalHit() *RankEvalHit { r := &RankEvalHit{} diff --git a/typedapi/types/rankevalhititem.go b/typedapi/types/rankevalhititem.go old mode 100755 new mode 100644 index cd327c40d6..76f40ed091 --- a/typedapi/types/rankevalhititem.go +++ b/typedapi/types/rankevalhititem.go @@ -16,18 +16,55 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // RankEvalHitItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L136-L139 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L136-L139 type RankEvalHitItem struct { Hit RankEvalHit `json:"hit"` Rating Float64 `json:"rating,omitempty"` } +func (s *RankEvalHitItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hit": + if err := dec.Decode(&s.Hit); err != nil { + return err + } + + case "rating": + if err := dec.Decode(&s.Rating); err != nil { + return err + } + + } + } + return nil +} + // NewRankEvalHitItem returns a RankEvalHitItem. func NewRankEvalHitItem() *RankEvalHitItem { r := &RankEvalHitItem{} diff --git a/typedapi/types/rankevalmetric.go b/typedapi/types/rankevalmetric.go old mode 100755 new mode 100644 index fd2fbddb8b..07b28497d2 --- a/typedapi/types/rankevalmetric.go +++ b/typedapi/types/rankevalmetric.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
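The "_score" case above accepts either a JSON number or a string-encoded number, a pattern the generated decoders repeat for many numeric and boolean fields. A self-contained sketch of that tolerant decoding, with a hypothetical helper name:

// Hypothetical helper showing the string-or-number decoding used for
// fields such as "_score"; both `1.5` and `"1.5"` yield the same value.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func lenientFloat(raw json.RawMessage) (float64, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseFloat(v, 64)
	case float64:
		return v, nil
	default:
		return 0, fmt.Errorf("unexpected JSON type %T", tmp)
	}
}

func main() {
	for _, in := range []string{`1.5`, `"1.5"`, `true`} {
		v, err := lenientFloat(json.RawMessage(in))
		fmt.Println(in, "->", v, err)
	}
}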
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RankEvalMetric type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L90-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L90-L96 type RankEvalMetric struct { Dcg *RankEvalMetricDiscountedCumulativeGain `json:"dcg,omitempty"` ExpectedReciprocalRank *RankEvalMetricExpectedReciprocalRank `json:"expected_reciprocal_rank,omitempty"` diff --git a/typedapi/types/rankevalmetricdetail.go b/typedapi/types/rankevalmetricdetail.go old mode 100755 new mode 100644 index 7c673a70a2..f445523e07 --- a/typedapi/types/rankevalmetricdetail.go +++ b/typedapi/types/rankevalmetricdetail.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // RankEvalMetricDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L125-L134 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L125-L134 type RankEvalMetricDetail struct { // Hits The hits section shows a grouping of the search results with their supplied // ratings @@ -44,6 +50,60 @@ type RankEvalMetricDetail struct { UnratedDocs []UnratedDocument `json:"unrated_docs"` } +func (s *RankEvalMetricDetail) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hits": + if err := dec.Decode(&s.Hits); err != nil { + return err + } + + case "metric_details": + if s.MetricDetails == nil { + s.MetricDetails = make(map[string]map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.MetricDetails); err != nil { + return err + } + + case "metric_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MetricScore = f + case float64: + f := Float64(v) + s.MetricScore = f + } + + case "unrated_docs": + if err := dec.Decode(&s.UnratedDocs); err != nil { + return err + } + + } + } + return nil +} + // NewRankEvalMetricDetail returns a RankEvalMetricDetail. func NewRankEvalMetricDetail() *RankEvalMetricDetail { r := &RankEvalMetricDetail{ diff --git a/typedapi/types/rankevalmetricdiscountedcumulativegain.go b/typedapi/types/rankevalmetricdiscountedcumulativegain.go old mode 100755 new mode 100644 index 71f4f361b6..3095aea084 --- a/typedapi/types/rankevalmetricdiscountedcumulativegain.go +++ b/typedapi/types/rankevalmetricdiscountedcumulativegain.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalMetricDiscountedCumulativeGain type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L66-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L66-L77 type RankEvalMetricDiscountedCumulativeGain struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -31,6 +41,56 @@ type RankEvalMetricDiscountedCumulativeGain struct { Normalize *bool `json:"normalize,omitempty"` } +func (s *RankEvalMetricDiscountedCumulativeGain) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "k": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "normalize": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Normalize = &value + case bool: + s.Normalize = &v + } + + } + } + return nil +} + // NewRankEvalMetricDiscountedCumulativeGain returns a RankEvalMetricDiscountedCumulativeGain. func NewRankEvalMetricDiscountedCumulativeGain() *RankEvalMetricDiscountedCumulativeGain { r := &RankEvalMetricDiscountedCumulativeGain{} diff --git a/typedapi/types/rankevalmetricexpectedreciprocalrank.go b/typedapi/types/rankevalmetricexpectedreciprocalrank.go old mode 100755 new mode 100644 index f68919b438..87693263d7 --- a/typedapi/types/rankevalmetricexpectedreciprocalrank.go +++ b/typedapi/types/rankevalmetricexpectedreciprocalrank.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalMetricExpectedReciprocalRank type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L79-L88 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L79-L88 type RankEvalMetricExpectedReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. 
@@ -31,6 +41,58 @@ type RankEvalMetricExpectedReciprocalRank struct { MaximumRelevance int `json:"maximum_relevance"` } +func (s *RankEvalMetricExpectedReciprocalRank) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "k": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "maximum_relevance": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaximumRelevance = value + case float64: + f := int(v) + s.MaximumRelevance = f + } + + } + } + return nil +} + // NewRankEvalMetricExpectedReciprocalRank returns a RankEvalMetricExpectedReciprocalRank. func NewRankEvalMetricExpectedReciprocalRank() *RankEvalMetricExpectedReciprocalRank { r := &RankEvalMetricExpectedReciprocalRank{} diff --git a/typedapi/types/rankevalmetricmeanreciprocalrank.go b/typedapi/types/rankevalmetricmeanreciprocalrank.go old mode 100755 new mode 100644 index 16b91daf28..7df617f0d0 --- a/typedapi/types/rankevalmetricmeanreciprocalrank.go +++ b/typedapi/types/rankevalmetricmeanreciprocalrank.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalMetricMeanReciprocalRank type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L60-L64 type RankEvalMetricMeanReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -32,6 +42,58 @@ type RankEvalMetricMeanReciprocalRank struct { RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` } +func (s *RankEvalMetricMeanReciprocalRank) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "k": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "relevant_rating_threshold": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelevantRatingThreshold = &value + case float64: + f := int(v) + s.RelevantRatingThreshold = &f + } + + } + } + return nil +} + // NewRankEvalMetricMeanReciprocalRank returns a RankEvalMetricMeanReciprocalRank. 
func NewRankEvalMetricMeanReciprocalRank() *RankEvalMetricMeanReciprocalRank { r := &RankEvalMetricMeanReciprocalRank{} diff --git a/typedapi/types/rankevalmetricprecision.go b/typedapi/types/rankevalmetricprecision.go old mode 100755 new mode 100644 index a0dbed5964..4edb068e63 --- a/typedapi/types/rankevalmetricprecision.go +++ b/typedapi/types/rankevalmetricprecision.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalMetricPrecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L42-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L42-L52 type RankEvalMetricPrecision struct { // IgnoreUnlabeled Controls how unlabeled documents in the search results are counted. If set to // true, unlabeled documents are ignored and neither count as relevant or @@ -36,6 +46,72 @@ type RankEvalMetricPrecision struct { RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` } +func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "ignore_unlabeled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnlabeled = &value + case bool: + s.IgnoreUnlabeled = &v + } + + case "k": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "relevant_rating_threshold": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelevantRatingThreshold = &value + case float64: + f := int(v) + s.RelevantRatingThreshold = &f + } + + } + } + return nil +} + // NewRankEvalMetricPrecision returns a RankEvalMetricPrecision. func NewRankEvalMetricPrecision() *RankEvalMetricPrecision { r := &RankEvalMetricPrecision{} diff --git a/typedapi/types/rankevalmetricratingtreshold.go b/typedapi/types/rankevalmetricratingtreshold.go old mode 100755 new mode 100644 index 160f989993..8efca27426 --- a/typedapi/types/rankevalmetricratingtreshold.go +++ b/typedapi/types/rankevalmetricratingtreshold.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalMetricRatingTreshold type. 
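Because of the decoder added above, a precision metric whose parameters arrive as quoted numbers still unmarshals cleanly. A short usage sketch, assuming the go-elasticsearch v8 module (and with it this typedapi/types package) is on the module path; the request body is made up:

// Hypothetical usage sketch: decoding a precision metric whose "k" is a
// quoted number; the field names follow the RankEvalMetricPrecision struct
// in this change.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	body := []byte(`{"k": "20", "ignore_unlabeled": true}`)

	var m types.RankEvalMetricPrecision
	if err := json.Unmarshal(body, &m); err != nil {
		panic(err)
	}
	fmt.Println(*m.K, *m.IgnoreUnlabeled) // 20 true
}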
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L34-L40 type RankEvalMetricRatingTreshold struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -32,6 +42,58 @@ type RankEvalMetricRatingTreshold struct { RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` } +func (s *RankEvalMetricRatingTreshold) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "k": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "relevant_rating_threshold": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelevantRatingThreshold = &value + case float64: + f := int(v) + s.RelevantRatingThreshold = &f + } + + } + } + return nil +} + // NewRankEvalMetricRatingTreshold returns a RankEvalMetricRatingTreshold. func NewRankEvalMetricRatingTreshold() *RankEvalMetricRatingTreshold { r := &RankEvalMetricRatingTreshold{} diff --git a/typedapi/types/rankevalmetricrecall.go b/typedapi/types/rankevalmetricrecall.go old mode 100755 new mode 100644 index 49102613ed..fa6428b97b --- a/typedapi/types/rankevalmetricrecall.go +++ b/typedapi/types/rankevalmetricrecall.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalMetricRecall type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L54-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L54-L58 type RankEvalMetricRecall struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. 
@@ -32,6 +42,58 @@ type RankEvalMetricRecall struct { RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` } +func (s *RankEvalMetricRecall) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "k": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "relevant_rating_threshold": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelevantRatingThreshold = &value + case float64: + f := int(v) + s.RelevantRatingThreshold = &f + } + + } + } + return nil +} + // NewRankEvalMetricRecall returns a RankEvalMetricRecall. func NewRankEvalMetricRecall() *RankEvalMetricRecall { r := &RankEvalMetricRecall{} diff --git a/typedapi/types/rankevalquery.go b/typedapi/types/rankevalquery.go old mode 100755 new mode 100644 index 2a2f6fb8f1..5265be4a58 --- a/typedapi/types/rankevalquery.go +++ b/typedapi/types/rankevalquery.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankEvalQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L111-L114 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L111-L114 type RankEvalQuery struct { Query Query `json:"query"` Size *int `json:"size,omitempty"` } +func (s *RankEvalQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil +} + // NewRankEvalQuery returns a RankEvalQuery. func NewRankEvalQuery() *RankEvalQuery { r := &RankEvalQuery{} diff --git a/typedapi/types/rankevalrequestitem.go b/typedapi/types/rankevalrequestitem.go old mode 100755 new mode 100644 index fe24862027..28ae56e2ea --- a/typedapi/types/rankevalrequestitem.go +++ b/typedapi/types/rankevalrequestitem.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // RankEvalRequestItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L98-L109 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L98-L109 type RankEvalRequestItem struct { // Id The search request’s ID, used to group result details later. Id string `json:"id"` @@ -40,6 +44,54 @@ type RankEvalRequestItem struct { TemplateId *string `json:"template_id,omitempty"` } +func (s *RankEvalRequestItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "ratings": + if err := dec.Decode(&s.Ratings); err != nil { + return err + } + + case "request": + if err := dec.Decode(&s.Request); err != nil { + return err + } + + case "template_id": + if err := dec.Decode(&s.TemplateId); err != nil { + return err + } + + } + } + return nil +} + // NewRankEvalRequestItem returns a RankEvalRequestItem. func NewRankEvalRequestItem() *RankEvalRequestItem { r := &RankEvalRequestItem{ diff --git a/typedapi/types/rankfeaturefunction.go b/typedapi/types/rankfeaturefunction.go old mode 100755 new mode 100644 index f61fd9841c..e322c2e8fb --- a/typedapi/types/rankfeaturefunction.go +++ b/typedapi/types/rankfeaturefunction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RankFeatureFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L137-L137 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L137-L137 type RankFeatureFunction struct { } diff --git a/typedapi/types/rankfeaturefunctionlinear.go b/typedapi/types/rankfeaturefunctionlinear.go old mode 100755 new mode 100644 index 26da628b07..4fe0d43883 --- a/typedapi/types/rankfeaturefunctionlinear.go +++ b/typedapi/types/rankfeaturefunctionlinear.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RankFeatureFunctionLinear type. 
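Taken together, the rank_eval item types above let a request item round-trip through encoding/json. A sketch of decoding one, again assuming the go-elasticsearch v8 module is importable; the body and its field values are assumptions for illustration:

// Hypothetical sketch: decoding a rank_eval request item with the
// UnmarshalJSON added above; the document body is an assumption.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	body := []byte(`{
	  "id": "query_1",
	  "request": { "query": { "match_all": {} } },
	  "ratings": [
	    { "_id": "1", "_index": "products", "rating": 3 }
	  ]
	}`)

	var item types.RankEvalRequestItem
	if err := json.Unmarshal(body, &item); err != nil {
		panic(err)
	}
	fmt.Println(item.Id, len(item.Ratings)) // query_1 1
}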
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L139-L139 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L139-L139 type RankFeatureFunctionLinear struct { } diff --git a/typedapi/types/rankfeaturefunctionlogarithm.go b/typedapi/types/rankfeaturefunctionlogarithm.go old mode 100755 new mode 100644 index a5f6372986..b280e31290 --- a/typedapi/types/rankfeaturefunctionlogarithm.go +++ b/typedapi/types/rankfeaturefunctionlogarithm.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankFeatureFunctionLogarithm type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L141-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L141-L143 type RankFeatureFunctionLogarithm struct { ScalingFactor float32 `json:"scaling_factor"` } +func (s *RankFeatureFunctionLogarithm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "scaling_factor": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.ScalingFactor = f + case float64: + f := float32(v) + s.ScalingFactor = f + } + + } + } + return nil +} + // NewRankFeatureFunctionLogarithm returns a RankFeatureFunctionLogarithm. func NewRankFeatureFunctionLogarithm() *RankFeatureFunctionLogarithm { r := &RankFeatureFunctionLogarithm{} diff --git a/typedapi/types/rankfeaturefunctionsaturation.go b/typedapi/types/rankfeaturefunctionsaturation.go old mode 100755 new mode 100644 index 2830b629aa..794858cd64 --- a/typedapi/types/rankfeaturefunctionsaturation.go +++ b/typedapi/types/rankfeaturefunctionsaturation.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankFeatureFunctionSaturation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L145-L147 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L145-L147 type RankFeatureFunctionSaturation struct { Pivot *float32 `json:"pivot,omitempty"` } +func (s *RankFeatureFunctionSaturation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "pivot": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Pivot = &f + case float64: + f := float32(v) + s.Pivot = &f + } + + } + } + return nil +} + // NewRankFeatureFunctionSaturation returns a RankFeatureFunctionSaturation. func NewRankFeatureFunctionSaturation() *RankFeatureFunctionSaturation { r := &RankFeatureFunctionSaturation{} diff --git a/typedapi/types/rankfeaturefunctionsigmoid.go b/typedapi/types/rankfeaturefunctionsigmoid.go old mode 100755 new mode 100644 index d72bc855ba..462b8c4a07 --- a/typedapi/types/rankfeaturefunctionsigmoid.go +++ b/typedapi/types/rankfeaturefunctionsigmoid.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankFeatureFunctionSigmoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L149-L152 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L149-L152 type RankFeatureFunctionSigmoid struct { Exponent float32 `json:"exponent"` Pivot float32 `json:"pivot"` } +func (s *RankFeatureFunctionSigmoid) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "exponent": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Exponent = f + case float64: + f := float32(v) + s.Exponent = f + } + + case "pivot": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Pivot = f + case float64: + f := float32(v) + s.Pivot = f + } + + } + } + return nil +} + // NewRankFeatureFunctionSigmoid returns a RankFeatureFunctionSigmoid. 
func NewRankFeatureFunctionSigmoid() *RankFeatureFunctionSigmoid { r := &RankFeatureFunctionSigmoid{} diff --git a/typedapi/types/rankfeatureproperty.go b/typedapi/types/rankfeatureproperty.go old mode 100755 new mode 100644 index e421e92973..5f93a953ba --- a/typedapi/types/rankfeatureproperty.go +++ b/typedapi/types/rankfeatureproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // RankFeatureProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L181-L184 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L181-L184 type RankFeatureProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -45,6 +47,7 @@ type RankFeatureProperty struct { } func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -64,6 +67,9 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -351,28 +357,54 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "positive_score_impact": - if err := dec.Decode(&s.PositiveScoreImpact); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PositiveScoreImpact = &value + case bool: + s.PositiveScoreImpact = &v } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -660,7 +692,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/rankfeaturequery.go b/typedapi/types/rankfeaturequery.go old mode 100755 new mode 100644 index f77b9e98ba..b2cac07425 --- a/typedapi/types/rankfeaturequery.go +++ b/typedapi/types/rankfeaturequery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RankFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L154-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L154-L162 type RankFeatureQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` @@ -33,6 +43,75 @@ type RankFeatureQuery struct { Sigmoid *RankFeatureFunctionSigmoid `json:"sigmoid,omitempty"` } +func (s *RankFeatureQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "linear": + if err := dec.Decode(&s.Linear); err != nil { + return err + } + + case "log": + if err := dec.Decode(&s.Log); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "saturation": + if err := dec.Decode(&s.Saturation); err != nil { + return err + } + + case "sigmoid": + if err := dec.Decode(&s.Sigmoid); err != nil { + return err + } + + } + } + return nil +} + // NewRankFeatureQuery returns a RankFeatureQuery. func NewRankFeatureQuery() *RankFeatureQuery { r := &RankFeatureQuery{} diff --git a/typedapi/types/rankfeaturesproperty.go b/typedapi/types/rankfeaturesproperty.go old mode 100755 new mode 100644 index f2eff0fc48..957b989261 --- a/typedapi/types/rankfeaturesproperty.go +++ b/typedapi/types/rankfeaturesproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // RankFeaturesProperty type. 
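On the query side, the rank_feature structs above can be populated directly and serialized with the standard library. A brief sketch under the same module assumption; the "pagerank" field name is made up:

// Hypothetical sketch: building a rank_feature query from the structs in
// this change and marshalling it; only "field" and "saturation" are set,
// so the optional members are omitted from the output.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	pivot := float32(8)

	q := types.RankFeatureQuery{
		Field:      "pagerank",
		Saturation: &types.RankFeatureFunctionSaturation{Pivot: &pivot},
	}

	out, err := json.Marshal(q)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // prints the rank_feature query body
}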
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L186-L188 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L186-L188 type RankFeaturesProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -44,6 +46,7 @@ type RankFeaturesProperty struct { } func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,6 +66,9 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -350,23 +356,40 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -654,7 +677,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } diff --git a/typedapi/types/raretermsaggregation.go b/typedapi/types/raretermsaggregation.go old mode 100755 new mode 100644 index d3bade4085..d600954761 --- a/typedapi/types/raretermsaggregation.go +++ b/typedapi/types/raretermsaggregation.go @@ -16,27 +16,136 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // RareTermsAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L304-L312 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L304-L312 type RareTermsAggregation struct { - Exclude []string `json:"exclude,omitempty"` - Field *string `json:"field,omitempty"` - Include TermsInclude `json:"include,omitempty"` - MaxDocCount *int64 `json:"max_doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Missing Missing `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` - Precision *Float64 `json:"precision,omitempty"` - ValueType *string `json:"value_type,omitempty"` + Exclude []string `json:"exclude,omitempty"` + Field *string `json:"field,omitempty"` + Include TermsInclude `json:"include,omitempty"` + MaxDocCount *int64 `json:"max_doc_count,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Missing Missing `json:"missing,omitempty"` + Name *string `json:"name,omitempty"` + Precision *Float64 `json:"precision,omitempty"` + ValueType *string `json:"value_type,omitempty"` +} + +func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "exclude": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Exclude = append(s.Exclude, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { + return err + } + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "include": + if err := dec.Decode(&s.Include); err != nil { + return err + } + + case "max_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxDocCount = &value + case float64: + f := int64(v) + s.MaxDocCount = &f + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "precision": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Precision = &f + case float64: + f := Float64(v) + s.Precision = &f + } + + case "value_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueType = &o + + } + } + return nil } // NewRareTermsAggregation returns a RareTermsAggregation. diff --git a/typedapi/types/rateaggregate.go b/typedapi/types/rateaggregate.go old mode 100755 new mode 100644 index f663e09fbf..fe8761d358 --- a/typedapi/types/rateaggregate.go +++ b/typedapi/types/rateaggregate.go @@ -16,21 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
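The "exclude" case above accepts either a single pattern string or an array of patterns, normalizing both to a []string. A self-contained sketch of that string-or-array handling with a hypothetical helper:

// Hypothetical, self-contained illustration of the string-or-array decoding
// used for "exclude": a lone string becomes a one-element slice.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func lenientStrings(raw json.RawMessage) ([]string, error) {
	if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
		var s string
		if err := json.Unmarshal(raw, &s); err != nil {
			return nil, err
		}
		return []string{s}, nil
	}
	var out []string
	if err := json.Unmarshal(raw, &out); err != nil {
		return nil, err
	}
	return out, nil
}

func main() {
	for _, in := range []string{`"debug.*"`, `["debug.*", "internal-*"]`} {
		vals, err := lenientStrings(json.RawMessage(in))
		fmt.Println(vals, err)
	}
}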
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // RateAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L732-L736 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L741-L745 type RateAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Value Float64 `json:"value"` - ValueAsString *string `json:"value_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Value Float64 `json:"value"` + ValueAsString *string `json:"value_as_string,omitempty"` +} + +func (s *RateAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Value = f + case float64: + f := Float64(v) + s.Value = f + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil } // NewRateAggregate returns a RateAggregate. diff --git a/typedapi/types/rateaggregation.go b/typedapi/types/rateaggregation.go old mode 100755 new mode 100644 index e4fa582584..7cd496cbdb --- a/typedapi/types/rateaggregation.go +++ b/typedapi/types/rateaggregation.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/calendarinterval" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/ratemode" + + "bytes" + "errors" + "io" + + "encoding/json" ) // RateAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L127-L130 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L127-L130 type RateAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -37,6 +43,59 @@ type RateAggregation struct { Unit *calendarinterval.CalendarInterval `json:"unit,omitempty"` } +func (s *RateAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "unit": + if err := dec.Decode(&s.Unit); err != nil { + return err + } + + } + } + return nil +} + // NewRateAggregation returns a RateAggregation. func NewRateAggregation() *RateAggregation { r := &RateAggregation{} diff --git a/typedapi/types/readexception.go b/typedapi/types/readexception.go old mode 100755 new mode 100644 index 8472174de0..50bb8c7a0d --- a/typedapi/types/readexception.go +++ b/typedapi/types/readexception.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ReadException type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ccr/_types/FollowIndexStats.ts#L71-L75 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ccr/_types/FollowIndexStats.ts#L71-L75 type ReadException struct { Exception ErrorCause `json:"exception"` FromSeqNo int64 `json:"from_seq_no"` Retries int `json:"retries"` } +func (s *ReadException) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "exception": + if err := dec.Decode(&s.Exception); err != nil { + return err + } + + case "from_seq_no": + if err := dec.Decode(&s.FromSeqNo); err != nil { + return err + } + + case "retries": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Retries = value + case float64: + f := int(v) + s.Retries = f + } + + } + } + return nil +} + // NewReadException returns a ReadException. 
func NewReadException() *ReadException { r := &ReadException{} diff --git a/typedapi/types/realmcache.go b/typedapi/types/realmcache.go old mode 100755 new mode 100644 index 724a3b47e4..6d628b6caf --- a/typedapi/types/realmcache.go +++ b/typedapi/types/realmcache.go @@ -16,17 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RealmCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L260-L262 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L260-L262 type RealmCache struct { Size int64 `json:"size"` } +func (s *RealmCache) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Size = value + case float64: + f := int64(v) + s.Size = f + } + + } + } + return nil +} + // NewRealmCache returns a RealmCache. func NewRealmCache() *RealmCache { r := &RealmCache{} diff --git a/typedapi/types/realminfo.go b/typedapi/types/realminfo.go old mode 100755 new mode 100644 index b0de682072..79c5e7fbb6 --- a/typedapi/types/realminfo.go +++ b/typedapi/types/realminfo.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RealmInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RealmInfo.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RealmInfo.ts#L22-L25 type RealmInfo struct { Name string `json:"name"` Type string `json:"type"` } +func (s *RealmInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewRealmInfo returns a RealmInfo. func NewRealmInfo() *RealmInfo { r := &RealmInfo{} diff --git a/typedapi/types/recording.go b/typedapi/types/recording.go old mode 100755 new mode 100644 index 7eb00a8345..b0196435bd --- a/typedapi/types/recording.go +++ b/typedapi/types/recording.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Recording type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L94-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L94-L99 type Recording struct { CumulativeExecutionCount *int64 `json:"cumulative_execution_count,omitempty"` CumulativeExecutionTime Duration `json:"cumulative_execution_time,omitempty"` @@ -30,6 +40,59 @@ type Recording struct { Name *string `json:"name,omitempty"` } +func (s *Recording) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cumulative_execution_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CumulativeExecutionCount = &value + case float64: + f := int64(v) + s.CumulativeExecutionCount = &f + } + + case "cumulative_execution_time": + if err := dec.Decode(&s.CumulativeExecutionTime); err != nil { + return err + } + + case "cumulative_execution_time_millis": + if err := dec.Decode(&s.CumulativeExecutionTimeMillis); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + } + } + return nil +} + // NewRecording returns a Recording. func NewRecording() *Recording { r := &Recording{} diff --git a/typedapi/types/recoverybytes.go b/typedapi/types/recoverybytes.go old mode 100755 new mode 100644 index c0ed07b541..8d139e3a6b --- a/typedapi/types/recoverybytes.go +++ b/typedapi/types/recoverybytes.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RecoveryBytes type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L38-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L38-L48 type RecoveryBytes struct { Percent Percentage `json:"percent"` Recovered ByteSize `json:"recovered,omitempty"` @@ -35,6 +43,71 @@ type RecoveryBytes struct { TotalInBytes ByteSize `json:"total_in_bytes"` } +func (s *RecoveryBytes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "percent": + if err := dec.Decode(&s.Percent); err != nil { + return err + } + + case "recovered": + if err := dec.Decode(&s.Recovered); err != nil { + return err + } + + case "recovered_from_snapshot": + if err := dec.Decode(&s.RecoveredFromSnapshot); err != nil { + return err + } + + case "recovered_from_snapshot_in_bytes": + if err := dec.Decode(&s.RecoveredFromSnapshotInBytes); err != nil { + return err + } + + case "recovered_in_bytes": + if err := dec.Decode(&s.RecoveredInBytes); err != nil { + return err + } + + case "reused": + if err := dec.Decode(&s.Reused); err != nil { + return err + } + + case "reused_in_bytes": + if err := dec.Decode(&s.ReusedInBytes); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + case "total_in_bytes": + if err := dec.Decode(&s.TotalInBytes); err != nil { + return err + } + + } + } + return nil +} + // NewRecoveryBytes returns a RecoveryBytes. func NewRecoveryBytes() *RecoveryBytes { r := &RecoveryBytes{} diff --git a/typedapi/types/recoveryfiles.go b/typedapi/types/recoveryfiles.go old mode 100755 new mode 100644 index bb3d2c0bb1..333c3bbb32 --- a/typedapi/types/recoveryfiles.go +++ b/typedapi/types/recoveryfiles.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RecoveryFiles type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L56-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L56-L62 type RecoveryFiles struct { Details []FileDetails `json:"details,omitempty"` Percent Percentage `json:"percent"` @@ -31,6 +41,81 @@ type RecoveryFiles struct { Total int64 `json:"total"` } +func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "details": + if err := dec.Decode(&s.Details); err != nil { + return err + } + + case "percent": + if err := dec.Decode(&s.Percent); err != nil { + return err + } + + case "recovered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Recovered = value + case float64: + f := int64(v) + s.Recovered = f + } + + case "reused": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Reused = value + case float64: + f := int64(v) + s.Reused = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewRecoveryFiles returns a RecoveryFiles. func NewRecoveryFiles() *RecoveryFiles { r := &RecoveryFiles{} diff --git a/typedapi/types/recoveryindexstatus.go b/typedapi/types/recoveryindexstatus.go old mode 100755 new mode 100644 index 250c6c337a..1f9358e08d --- a/typedapi/types/recoveryindexstatus.go +++ b/typedapi/types/recoveryindexstatus.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RecoveryIndexStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L64-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L64-L74 type RecoveryIndexStatus struct { Bytes *RecoveryBytes `json:"bytes,omitempty"` Files RecoveryFiles `json:"files"` @@ -35,6 +43,71 @@ type RecoveryIndexStatus struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *RecoveryIndexStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bytes": + if err := dec.Decode(&s.Bytes); err != nil { + return err + } + + case "files": + if err := dec.Decode(&s.Files); err != nil { + return err + } + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + case "source_throttle_time": + if err := dec.Decode(&s.SourceThrottleTime); err != nil { + return err + } + + case "source_throttle_time_in_millis": + if err := dec.Decode(&s.SourceThrottleTimeInMillis); err != nil { + return err + } + + case "target_throttle_time": + if err := dec.Decode(&s.TargetThrottleTime); err != nil { + return err + } + + case "target_throttle_time_in_millis": + if err := dec.Decode(&s.TargetThrottleTimeInMillis); err != nil { + return err + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewRecoveryIndexStatus returns a RecoveryIndexStatus. func NewRecoveryIndexStatus() *RecoveryIndexStatus { r := &RecoveryIndexStatus{} diff --git a/typedapi/types/recoveryorigin.go b/typedapi/types/recoveryorigin.go old mode 100755 new mode 100644 index f568ca93f9..591e4700d5 --- a/typedapi/types/recoveryorigin.go +++ b/typedapi/types/recoveryorigin.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RecoveryOrigin type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L76-L89 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L76-L89 type RecoveryOrigin struct { BootstrapNewHistoryUuid *bool `json:"bootstrap_new_history_uuid,omitempty"` Host *string `json:"host,omitempty"` @@ -38,6 +48,98 @@ type RecoveryOrigin struct { Version *string `json:"version,omitempty"` } +func (s *RecoveryOrigin) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bootstrap_new_history_uuid": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.BootstrapNewHistoryUuid = &value + case bool: + s.BootstrapNewHistoryUuid = &v + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "hostname": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Hostname = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "repository": + if err := dec.Decode(&s.Repository); err != nil { + return err + } + + case "restoreUUID": + if err := dec.Decode(&s.RestoreUUID); err != nil { + return err + } + + case "snapshot": + if err := dec.Decode(&s.Snapshot); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewRecoveryOrigin returns a RecoveryOrigin. func NewRecoveryOrigin() *RecoveryOrigin { r := &RecoveryOrigin{} diff --git a/typedapi/types/recoveryrecord.go b/typedapi/types/recoveryrecord.go old mode 100755 new mode 100644 index 4e1d622b99..10c934742d --- a/typedapi/types/recoveryrecord.go +++ b/typedapi/types/recoveryrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RecoveryRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/recovery/types.ts#L24-L155 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/recovery/types.ts#L24-L155 type RecoveryRecord struct { // Bytes number of bytes to recover Bytes *string `json:"bytes,omitempty"` @@ -78,6 +86,207 @@ type RecoveryRecord struct { Type *string `json:"type,omitempty"` } +func (s *RecoveryRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bytes", "b": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Bytes = &o + + case "bytes_percent", "bp": + if err := dec.Decode(&s.BytesPercent); err != nil { + return err + } + + case "bytes_recovered", "br": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BytesRecovered = &o + + case "bytes_total", "tb": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BytesTotal = &o + + case "files", "f": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Files = &o + + case "files_percent", "fp": + if err := dec.Decode(&s.FilesPercent); err != nil { + return err + } + + case "files_recovered", "fr": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FilesRecovered = &o + + case "files_total", "tf": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FilesTotal = &o + + case "index", "i", "idx": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "repository", "rep": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Repository = &o + + case "shard", "s", "sh": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Shard = &o + + case "snapshot", "snap": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Snapshot = &o + + case "source_host", "shost": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SourceHost = &o + + case "source_node", "snode": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SourceNode = &o + + case "stage", "st": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Stage = &o + + case "start_time", "start": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + case "start_time_millis", "start_millis": + if err := dec.Decode(&s.StartTimeMillis); err != nil { + return err + } + + case "stop_time", "stop": + if err := dec.Decode(&s.StopTime); err != nil { + return err + } + + case "stop_time_millis", "stop_millis": + if err := dec.Decode(&s.StopTimeMillis); err != nil { + return err + } + + case "target_host", "thost": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TargetHost = &o + + case "target_node", "tnode": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err 
!= nil { + return err + } + o := string(tmp) + s.TargetNode = &o + + case "time", "t", "ti": + if err := dec.Decode(&s.Time); err != nil { + return err + } + + case "translog_ops", "to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TranslogOps = &o + + case "translog_ops_percent", "top": + if err := dec.Decode(&s.TranslogOpsPercent); err != nil { + return err + } + + case "translog_ops_recovered", "tor": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TranslogOpsRecovered = &o + + case "type", "ty": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + } + } + return nil +} + // NewRecoveryRecord returns a RecoveryRecord. func NewRecoveryRecord() *RecoveryRecord { r := &RecoveryRecord{} diff --git a/typedapi/types/recoverystartstatus.go b/typedapi/types/recoverystartstatus.go old mode 100755 new mode 100644 index 2cbf9fa6a8..ae78ec51b2 --- a/typedapi/types/recoverystartstatus.go +++ b/typedapi/types/recoverystartstatus.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RecoveryStartStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L91-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L91-L96 type RecoveryStartStatus struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` @@ -30,6 +38,46 @@ type RecoveryStartStatus struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *RecoveryStartStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "check_index_time": + if err := dec.Decode(&s.CheckIndexTime); err != nil { + return err + } + + case "check_index_time_in_millis": + if err := dec.Decode(&s.CheckIndexTimeInMillis); err != nil { + return err + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewRecoveryStartStatus returns a RecoveryStartStatus. func NewRecoveryStartStatus() *RecoveryStartStatus { r := &RecoveryStartStatus{} diff --git a/typedapi/types/recoverystats.go b/typedapi/types/recoverystats.go old mode 100755 new mode 100644 index b7f7fa4932..32966b148f --- a/typedapi/types/recoverystats.go +++ b/typedapi/types/recoverystats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
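A minimal sketch of the RecoveryRecord decoder above, assuming a hand-written sample row and the typedapi/types import path from this patch; illustrative only. It shows that the abbreviated _cat column names ("b", "ty") are recognized alongside the long forms.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical _cat/recovery row using abbreviated column names.
	data := []byte(`{"b":"1024","ty":"peer"}`)

	var row types.RecoveryRecord
	if err := json.Unmarshal(data, &row); err != nil {
		panic(err)
	}
	// Both fields were populated from the short keys.
	fmt.Println(row.Bytes != nil, row.Type != nil) // true true
}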
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RecoveryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L161-L166 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L161-L166 type RecoveryStats struct { CurrentAsSource int64 `json:"current_as_source"` CurrentAsTarget int64 `json:"current_as_target"` @@ -30,6 +40,66 @@ type RecoveryStats struct { ThrottleTimeInMillis int64 `json:"throttle_time_in_millis"` } +func (s *RecoveryStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current_as_source": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CurrentAsSource = value + case float64: + f := int64(v) + s.CurrentAsSource = f + } + + case "current_as_target": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CurrentAsTarget = value + case float64: + f := int64(v) + s.CurrentAsTarget = f + } + + case "throttle_time": + if err := dec.Decode(&s.ThrottleTime); err != nil { + return err + } + + case "throttle_time_in_millis": + if err := dec.Decode(&s.ThrottleTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewRecoveryStats returns a RecoveryStats. func NewRecoveryStats() *RecoveryStats { r := &RecoveryStats{} diff --git a/typedapi/types/recoverystatus.go b/typedapi/types/recoverystatus.go old mode 100755 new mode 100644 index 4d34125c34..fb33ee2f50 --- a/typedapi/types/recoverystatus.go +++ b/typedapi/types/recoverystatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RecoveryStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L98-L100 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L98-L100 type RecoveryStatus struct { Shards []ShardRecovery `json:"shards"` } diff --git a/typedapi/types/refreshstats.go b/typedapi/types/refreshstats.go old mode 100755 new mode 100644 index ceb0efed9d..2d88cb834f --- a/typedapi/types/refreshstats.go +++ b/typedapi/types/refreshstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RefreshStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L168-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L168-L175 type RefreshStats struct { ExternalTotal int64 `json:"external_total"` ExternalTotalTimeInMillis int64 `json:"external_total_time_in_millis"` @@ -32,6 +42,86 @@ type RefreshStats struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *RefreshStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "external_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ExternalTotal = value + case float64: + f := int64(v) + s.ExternalTotal = f + } + + case "external_total_time_in_millis": + if err := dec.Decode(&s.ExternalTotalTimeInMillis); err != nil { + return err + } + + case "listeners": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Listeners = value + case float64: + f := int64(v) + s.Listeners = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewRefreshStats returns a RefreshStats. func NewRefreshStats() *RefreshStats { r := &RefreshStats{} diff --git a/typedapi/types/regexoptions.go b/typedapi/types/regexoptions.go new file mode 100644 index 0000000000..59a44a8bb4 --- /dev/null +++ b/typedapi/types/regexoptions.go @@ -0,0 +1,90 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// RegexOptions type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L137-L140 +type RegexOptions struct { + Flags string `json:"flags,omitempty"` + MaxDeterminizedStates *int `json:"max_determinized_states,omitempty"` +} + +func (s *RegexOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "flags": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flags = o + + case "max_determinized_states": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDeterminizedStates = &value + case float64: + f := int(v) + s.MaxDeterminizedStates = &f + } + + } + } + return nil +} + +// NewRegexOptions returns a RegexOptions. +func NewRegexOptions() *RegexOptions { + r := &RegexOptions{} + + return r +} diff --git a/typedapi/types/regexpquery.go b/typedapi/types/regexpquery.go old mode 100755 new mode 100644 index e6a6ede49b..a1d304beee --- a/typedapi/types/regexpquery.go +++ b/typedapi/types/regexpquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RegexpQuery type. 
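A minimal sketch of the new RegexOptions type and its decoder above, assuming the typedapi/types import path from this patch and an invented sample payload; illustrative only. It shows "max_determinized_states" being accepted as a quoted number.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical suggester regex options with a string-encoded integer.
	data := []byte(`{"flags":"ALL","max_determinized_states":"10000"}`)

	var opts types.RegexOptions
	if err := json.Unmarshal(data, &opts); err != nil {
		panic(err)
	}
	fmt.Println(*opts.MaxDeterminizedStates) // 10000
}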
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L102-L114 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L102-L114 type RegexpQuery struct { Boost *float32 `json:"boost,omitempty"` CaseInsensitive *bool `json:"case_insensitive,omitempty"` @@ -33,6 +43,106 @@ type RegexpQuery struct { Value string `json:"value"` } +func (s *RegexpQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Value) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "case_insensitive": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CaseInsensitive = &value + case bool: + s.CaseInsensitive = &v + } + + case "flags": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Flags = &o + + case "max_determinized_states": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDeterminizedStates = &value + case float64: + f := int(v) + s.MaxDeterminizedStates = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "rewrite": + if err := dec.Decode(&s.Rewrite); err != nil { + return err + } + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewRegexpQuery returns a RegexpQuery. func NewRegexpQuery() *RegexpQuery { r := &RegexpQuery{} diff --git a/typedapi/types/regressioninferenceoptions.go b/typedapi/types/regressioninferenceoptions.go old mode 100755 new mode 100644 index 8cc81bccf1..58f95726f8 --- a/typedapi/types/regressioninferenceoptions.go +++ b/typedapi/types/regressioninferenceoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RegressionInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L69-L78 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L69-L78 type RegressionInferenceOptions struct { // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. 
NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` @@ -31,6 +41,47 @@ type RegressionInferenceOptions struct { ResultsField *string `json:"results_field,omitempty"` } +func (s *RegressionInferenceOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_top_feature_importance_values": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopFeatureImportanceValues = &value + case float64: + f := int(v) + s.NumTopFeatureImportanceValues = &f + } + + case "results_field": + if err := dec.Decode(&s.ResultsField); err != nil { + return err + } + + } + } + return nil +} + // NewRegressionInferenceOptions returns a RegressionInferenceOptions. func NewRegressionInferenceOptions() *RegressionInferenceOptions { r := &RegressionInferenceOptions{} diff --git a/typedapi/types/reindexdestination.go b/typedapi/types/reindexdestination.go old mode 100755 new mode 100644 index 3e87956136..7dd3497c6c --- a/typedapi/types/reindexdestination.go +++ b/typedapi/types/reindexdestination.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/optype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/versiontype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ReindexDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex/types.ts#L39-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex/types.ts#L39-L45 type ReindexDestination struct { Index string `json:"index"` OpType *optype.OpType `json:"op_type,omitempty"` @@ -36,6 +42,54 @@ type ReindexDestination struct { VersionType *versiontype.VersionType `json:"version_type,omitempty"` } +func (s *ReindexDestination) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "op_type": + if err := dec.Decode(&s.OpType); err != nil { + return err + } + + case "pipeline": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pipeline = &o + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "version_type": + if err := dec.Decode(&s.VersionType); err != nil { + return err + } + + } + } + return nil +} + // NewReindexDestination returns a ReindexDestination. func NewReindexDestination() *ReindexDestination { r := &ReindexDestination{} diff --git a/typedapi/types/reindexnode.go b/typedapi/types/reindexnode.go old mode 100755 new mode 100644 index aa04c5d8a6..c58593eb99 --- a/typedapi/types/reindexnode.go +++ b/typedapi/types/reindexnode.go @@ -16,32 +16,99 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ReindexNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex_rethrottle/types.ts#L33-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex_rethrottle/types.ts#L33-L35 type ReindexNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` Ip string `json:"ip"` Name string `json:"name"` Roles []noderole.NodeRole `json:"roles,omitempty"` - Tasks map[TaskId]ReindexTask `json:"tasks"` + Tasks map[string]ReindexTask `json:"tasks"` TransportAddress string `json:"transport_address"` } +func (s *ReindexNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "tasks": + if s.Tasks == nil { + s.Tasks = make(map[string]ReindexTask, 0) + } + if err := dec.Decode(&s.Tasks); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewReindexNode returns a ReindexNode. func NewReindexNode() *ReindexNode { r := &ReindexNode{ Attributes: make(map[string]string, 0), - Tasks: make(map[TaskId]ReindexTask, 0), + Tasks: make(map[string]ReindexTask, 0), } return r diff --git a/typedapi/types/reindexsource.go b/typedapi/types/reindexsource.go old mode 100755 new mode 100644 index e4fd9126e1..9d742ef6de --- a/typedapi/types/reindexsource.go +++ b/typedapi/types/reindexsource.go @@ -16,22 +16,136 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ReindexSource type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex/types.ts#L47-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex/types.ts#L47-L57 type ReindexSource struct { - Index []string `json:"index"` - Query *Query `json:"query,omitempty"` - Remote *RemoteSource `json:"remote,omitempty"` - RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` - Size *int `json:"size,omitempty"` - Slice *SlicedScroll `json:"slice,omitempty"` - Sort []SortCombinations `json:"sort,omitempty"` - SourceFields_ []string `json:"_source,omitempty"` + Index []string `json:"index"` + Query *Query `json:"query,omitempty"` + Remote *RemoteSource `json:"remote,omitempty"` + RuntimeMappings RuntimeFields `json:"runtime_mappings,omitempty"` + Size *int `json:"size,omitempty"` + Slice *SlicedScroll `json:"slice,omitempty"` + Sort []SortCombinations `json:"sort,omitempty"` + SourceFields_ []string `json:"_source,omitempty"` +} + +func (s *ReindexSource) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Index = append(s.Index, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { + return err + } + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "remote": + if err := dec.Decode(&s.Remote); err != nil { + return err + } + + case "runtime_mappings": + if err := dec.Decode(&s.RuntimeMappings); err != nil { + return err + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "slice": + if err := dec.Decode(&s.Slice); err != nil { + return err + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return err + } + } + + case "_source": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.SourceFields_ = append(s.SourceFields_, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.SourceFields_); err != nil { + return err + } + } + + } + } + return nil } // NewReindexSource returns a ReindexSource. diff --git a/typedapi/types/reindexstatus.go b/typedapi/types/reindexstatus.go old mode 100755 new mode 100644 index 01bdb69ab6..7c7b6cbb22 --- a/typedapi/types/reindexstatus.go +++ b/typedapi/types/reindexstatus.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
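A minimal sketch of the ReindexSource decoder above, assuming the typedapi/types import path from this patch and an invented payload; illustrative only. It shows that a single index name (rather than an array) and a string-encoded "size" are both normalized by the custom unmarshaler.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical reindex source: one index name and a quoted size.
	data := []byte(`{"index":"src-logs","size":"500"}`)

	var src types.ReindexSource
	if err := json.Unmarshal(data, &src); err != nil {
		panic(err)
	}
	fmt.Println(src.Index, *src.Size) // [src-logs] 500
}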
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ReindexStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex_rethrottle/types.ts#L37-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex_rethrottle/types.ts#L37-L51 type ReindexStatus struct { Batches int64 `json:"batches"` Created int64 `json:"created"` @@ -39,6 +49,172 @@ type ReindexStatus struct { VersionConflicts int64 `json:"version_conflicts"` } +func (s *ReindexStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "batches": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Batches = value + case float64: + f := int64(v) + s.Batches = f + } + + case "created": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Created = value + case float64: + f := int64(v) + s.Created = f + } + + case "deleted": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Deleted = value + case float64: + f := int64(v) + s.Deleted = f + } + + case "noops": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Noops = value + case float64: + f := int64(v) + s.Noops = f + } + + case "requests_per_second": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.RequestsPerSecond = f + case float64: + f := float32(v) + s.RequestsPerSecond = f + } + + case "retries": + if err := dec.Decode(&s.Retries); err != nil { + return err + } + + case "throttled": + if err := dec.Decode(&s.Throttled); err != nil { + return err + } + + case "throttled_millis": + if err := dec.Decode(&s.ThrottledMillis); err != nil { + return err + } + + case "throttled_until": + if err := dec.Decode(&s.ThrottledUntil); err != nil { + return err + } + + case "throttled_until_millis": + if err := dec.Decode(&s.ThrottledUntilMillis); err != nil { + return err + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "updated": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Updated = value + case float64: + f := int64(v) + s.Updated = f + } + + case "version_conflicts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err 
!= nil { + return err + } + s.VersionConflicts = value + case float64: + f := int64(v) + s.VersionConflicts = f + } + + } + } + return nil +} + // NewReindexStatus returns a ReindexStatus. func NewReindexStatus() *ReindexStatus { r := &ReindexStatus{} diff --git a/typedapi/types/reindextask.go b/typedapi/types/reindextask.go old mode 100755 new mode 100644 index 5b548ee514..24b1c2bd1d --- a/typedapi/types/reindextask.go +++ b/typedapi/types/reindextask.go @@ -16,24 +16,132 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ReindexTask type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex_rethrottle/types.ts#L53-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex_rethrottle/types.ts#L53-L64 type ReindexTask struct { - Action string `json:"action"` - Cancellable bool `json:"cancellable"` - Description string `json:"description"` - Headers map[string][]string `json:"headers"` - Id int64 `json:"id"` - Node string `json:"node"` - RunningTimeInNanos int64 `json:"running_time_in_nanos"` - StartTimeInMillis int64 `json:"start_time_in_millis"` - Status ReindexStatus `json:"status"` - Type string `json:"type"` + Action string `json:"action"` + Cancellable bool `json:"cancellable"` + Description string `json:"description"` + Headers HttpHeaders `json:"headers"` + Id int64 `json:"id"` + Node string `json:"node"` + RunningTimeInNanos int64 `json:"running_time_in_nanos"` + StartTimeInMillis int64 `json:"start_time_in_millis"` + Status ReindexStatus `json:"status"` + Type string `json:"type"` +} + +func (s *ReindexTask) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Action = o + + case "cancellable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Cancellable = value + case bool: + s.Cancellable = v + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "headers": + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Id = value + case float64: + f := int64(v) + s.Id = f + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "running_time_in_nanos": + if err := dec.Decode(&s.RunningTimeInNanos); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err 
:= dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil } // NewReindexTask returns a ReindexTask. diff --git a/typedapi/types/reloaddetails.go b/typedapi/types/reloaddetails.go old mode 100755 new mode 100644 index cec3c8261d..d52f52729b --- a/typedapi/types/reloaddetails.go +++ b/typedapi/types/reloaddetails.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ReloadDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/reload_search_analyzers/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/reload_search_analyzers/types.ts#L20-L24 type ReloadDetails struct { Index string `json:"index"` ReloadedAnalyzers []string `json:"reloaded_analyzers"` diff --git a/typedapi/types/relocationfailureinfo.go b/typedapi/types/relocationfailureinfo.go old mode 100755 new mode 100644 index a1e128f81c..6fefec32ba --- a/typedapi/types/relocationfailureinfo.go +++ b/typedapi/types/relocationfailureinfo.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RelocationFailureInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Node.ts#L72-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Node.ts#L72-L74 type RelocationFailureInfo struct { FailedAttempts int `json:"failed_attempts"` } +func (s *RelocationFailureInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "failed_attempts": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FailedAttempts = value + case float64: + f := int(v) + s.FailedAttempts = f + } + + } + } + return nil +} + // NewRelocationFailureInfo returns a RelocationFailureInfo. func NewRelocationFailureInfo() *RelocationFailureInfo { r := &RelocationFailureInfo{} diff --git a/typedapi/types/remotesource.go b/typedapi/types/remotesource.go old mode 100755 new mode 100644 index 6cea8fa570..9adb69222c --- a/typedapi/types/remotesource.go +++ b/typedapi/types/remotesource.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RemoteSource type. 
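A minimal sketch of the ReindexTask decoder added earlier in this patch, assuming the typedapi/types import path and an invented task entry; illustrative only. It shows "cancellable" and "id" being converted from quoted strings to bool and int64.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Hypothetical _reindex/rethrottle task entry with string-encoded fields.
	data := []byte(`{"cancellable":"true","id":"4242","node":"node-1"}`)

	var task types.ReindexTask
	if err := json.Unmarshal(data, &task); err != nil {
		panic(err)
	}
	fmt.Println(task.Cancellable, task.Id, task.Node) // true 4242 node-1
}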
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/reindex/types.ts#L59-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/reindex/types.ts#L59-L66 type RemoteSource struct { ConnectTimeout Duration `json:"connect_timeout,omitempty"` Headers map[string]string `json:"headers,omitempty"` @@ -32,6 +40,59 @@ type RemoteSource struct { Username *string `json:"username,omitempty"` } +func (s *RemoteSource) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "connect_timeout": + if err := dec.Decode(&s.ConnectTimeout); err != nil { + return err + } + + case "headers": + if s.Headers == nil { + s.Headers = make(map[string]string, 0) + } + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "password": + if err := dec.Decode(&s.Password); err != nil { + return err + } + + case "socket_timeout": + if err := dec.Decode(&s.SocketTimeout); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil +} + // NewRemoteSource returns a RemoteSource. func NewRemoteSource() *RemoteSource { r := &RemoteSource{ diff --git a/typedapi/types/removeaction.go b/typedapi/types/removeaction.go old mode 100755 new mode 100644 index 1623586e40..1ec3483be5 --- a/typedapi/types/removeaction.go +++ b/typedapi/types/removeaction.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RemoveAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/update_aliases/types.ts#L46-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/update_aliases/types.ts#L46-L53 type RemoveAction struct { Alias *string `json:"alias,omitempty"` Aliases []string `json:"aliases,omitempty"` @@ -31,6 +41,82 @@ type RemoveAction struct { MustExist *bool `json:"must_exist,omitempty"` } +func (s *RemoveAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alias": + if err := dec.Decode(&s.Alias); err != nil { + return err + } + + case "aliases": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Aliases = append(s.Aliases, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Aliases); err != nil { + return err + } + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "must_exist": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MustExist = &value + case bool: + s.MustExist = &v + } + + } + } + return nil +} + // NewRemoveAction returns a RemoveAction. func NewRemoveAction() *RemoveAction { r := &RemoveAction{} diff --git a/typedapi/types/removeduplicatestokenfilter.go b/typedapi/types/removeduplicatestokenfilter.go old mode 100755 new mode 100644 index 0ba613ad57..e0a88057f1 --- a/typedapi/types/removeduplicatestokenfilter.go +++ b/typedapi/types/removeduplicatestokenfilter.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RemoveDuplicatesTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L300-L302 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L301-L303 type RemoveDuplicatesTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *RemoveDuplicatesTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewRemoveDuplicatesTokenFilter returns a RemoveDuplicatesTokenFilter. func NewRemoveDuplicatesTokenFilter() *RemoveDuplicatesTokenFilter { r := &RemoveDuplicatesTokenFilter{} diff --git a/typedapi/types/removeindexaction.go b/typedapi/types/removeindexaction.go old mode 100755 new mode 100644 index e3994f7842..333c713abf --- a/typedapi/types/removeindexaction.go +++ b/typedapi/types/removeindexaction.go @@ -16,19 +16,84 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RemoveIndexAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/update_aliases/types.ts#L55-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/update_aliases/types.ts#L55-L60 type RemoveIndexAction struct { Index *string `json:"index,omitempty"` Indices []string `json:"indices,omitempty"` MustExist *bool `json:"must_exist,omitempty"` } +func (s *RemoveIndexAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "must_exist": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MustExist = &value + case bool: + s.MustExist = &v + } + + } + } + return nil +} + // NewRemoveIndexAction returns a RemoveIndexAction. 
func NewRemoveIndexAction() *RemoveIndexAction { r := &RemoveIndexAction{} diff --git a/typedapi/types/removeprocessor.go b/typedapi/types/removeprocessor.go old mode 100755 new mode 100644 index f7df605ebc..a687c4c806 --- a/typedapi/types/removeprocessor.go +++ b/typedapi/types/removeprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RemoveProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L311-L314 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L311-L314 type RemoveProcessor struct { Description *string `json:"description,omitempty"` Field []string `json:"field"` @@ -33,6 +43,99 @@ type RemoveProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Field = append(s.Field, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Field); err != nil { + return err + } + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewRemoveProcessor returns a RemoveProcessor. func NewRemoveProcessor() *RemoveProcessor { r := &RemoveProcessor{} diff --git a/typedapi/types/renameprocessor.go b/typedapi/types/renameprocessor.go old mode 100755 new mode 100644 index 7f6c593978..edc43e7ad6 --- a/typedapi/types/renameprocessor.go +++ b/typedapi/types/renameprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RenameProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L316-L320 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L316-L320 type RenameProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type RenameProcessor struct { TargetField string `json:"target_field"` } +func (s *RenameProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewRenameProcessor returns a RenameProcessor. func NewRenameProcessor() *RenameProcessor { r := &RenameProcessor{} diff --git a/typedapi/types/reportingemailattachment.go b/typedapi/types/reportingemailattachment.go old mode 100755 new mode 100644 index 467ec7f8a8..5c19d1b227 --- a/typedapi/types/reportingemailattachment.go +++ b/typedapi/types/reportingemailattachment.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ReportingEmailAttachment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L224-L232 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L224-L232 type ReportingEmailAttachment struct { Inline *bool `json:"inline,omitempty"` Interval Duration `json:"interval,omitempty"` @@ -31,6 +41,74 @@ type ReportingEmailAttachment struct { Url string `json:"url"` } +func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "inline": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Inline = &value + case bool: + s.Inline = &v + } + + case "interval": + if err := dec.Decode(&s.Interval); err != nil { + return err + } + + case "request": + if err := dec.Decode(&s.Request); err != nil { + return err + } + + case "retries": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Retries = &value + case float64: + f := int(v) + s.Retries = &f + } + + case "url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Url = o + + } + } + return nil +} + // NewReportingEmailAttachment returns a ReportingEmailAttachment. func NewReportingEmailAttachment() *ReportingEmailAttachment { r := &ReportingEmailAttachment{} diff --git a/typedapi/types/repositoriesrecord.go b/typedapi/types/repositoriesrecord.go old mode 100755 new mode 100644 index 8f541186b6..e35ddb91c4 --- a/typedapi/types/repositoriesrecord.go +++ b/typedapi/types/repositoriesrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RepositoriesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/repositories/types.ts#L20-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/repositories/types.ts#L20-L31 type RepositoriesRecord struct { // Id unique repository id Id *string `json:"id,omitempty"` diff --git a/typedapi/types/repository.go b/typedapi/types/repository.go old mode 100755 new mode 100644 index c24d538137..ca4634f2c9 --- a/typedapi/types/repository.go +++ b/typedapi/types/repository.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Repository type. 
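Numeric fields get the same treatment as booleans in the ReportingEmailAttachment decoder above: "retries" is accepted as a JSON number or a quoted number and lands in the *int field. A sketch with invented watcher attachment values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A quoted "retries" is still parsed via strconv.Atoi.
	raw := []byte(`{"url":"http://kibana.example.com/report","retries":"5","inline":true}`)

	att := types.NewReportingEmailAttachment()
	if err := json.Unmarshal(raw, att); err != nil {
		panic(err)
	}
	fmt.Println(*att.Retries, *att.Inline) // 5 true
}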
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotRepository.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotRepository.ts#L23-L27 type Repository struct { Settings RepositorySettings `json:"settings"` Type string `json:"type"` Uuid *string `json:"uuid,omitempty"` } +func (s *Repository) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return err + } + + } + } + return nil +} + // NewRepository returns a Repository. func NewRepository() *Repository { r := &Repository{} diff --git a/typedapi/types/repositoryintegrityindicator.go b/typedapi/types/repositoryintegrityindicator.go new file mode 100644 index 0000000000..f3e51e9dbf --- /dev/null +++ b/typedapi/types/repositoryintegrityindicator.go @@ -0,0 +1,43 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// RepositoryIntegrityIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L133-L137 +type RepositoryIntegrityIndicator struct { + Details *RepositoryIntegrityIndicatorDetails `json:"details,omitempty"` + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewRepositoryIntegrityIndicator returns a RepositoryIntegrityIndicator. +func NewRepositoryIntegrityIndicator() *RepositoryIntegrityIndicator { + r := &RepositoryIntegrityIndicator{} + + return r +} diff --git a/typedapi/types/repositoryintegrityindicatordetails.go b/typedapi/types/repositoryintegrityindicatordetails.go new file mode 100644 index 0000000000..9035e312dd --- /dev/null +++ b/typedapi/types/repositoryintegrityindicatordetails.go @@ -0,0 +1,102 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// RepositoryIntegrityIndicatorDetails type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L138-L142 +type RepositoryIntegrityIndicatorDetails struct { + Corrupted []string `json:"corrupted,omitempty"` + CorruptedRepositories *int64 `json:"corrupted_repositories,omitempty"` + TotalRepositories *int64 `json:"total_repositories,omitempty"` +} + +func (s *RepositoryIntegrityIndicatorDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "corrupted": + if err := dec.Decode(&s.Corrupted); err != nil { + return err + } + + case "corrupted_repositories": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CorruptedRepositories = &value + case float64: + f := int64(v) + s.CorruptedRepositories = &f + } + + case "total_repositories": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalRepositories = &value + case float64: + f := int64(v) + s.TotalRepositories = &f + } + + } + } + return nil +} + +// NewRepositoryIntegrityIndicatorDetails returns a RepositoryIntegrityIndicatorDetails. +func NewRepositoryIntegrityIndicatorDetails() *RepositoryIntegrityIndicatorDetails { + r := &RepositoryIntegrityIndicatorDetails{} + + return r +} diff --git a/typedapi/types/repositorylocation.go b/typedapi/types/repositorylocation.go old mode 100755 new mode 100644 index 4b17c833d2..59aa0f0604 --- a/typedapi/types/repositorylocation.go +++ b/typedapi/types/repositorylocation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RepositoryLocation type. 
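repositoryintegrityindicator.go and repositoryintegrityindicatordetails.go are new files backing the health report types referenced by the new spec paths above (specification/_global/health_report/types.ts); the details decoder tolerates counters sent as strings or numbers. A minimal sketch with invented values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// One counter quoted, one numeric; both end up in *int64 fields.
	raw := []byte(`{"corrupted":["my-repo"],"corrupted_repositories":"1","total_repositories":3}`)

	d := types.NewRepositoryIntegrityIndicatorDetails()
	if err := json.Unmarshal(raw, d); err != nil {
		panic(err)
	}
	fmt.Println(d.Corrupted, *d.CorruptedRepositories, *d.TotalRepositories) // [my-repo] 1 3
}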
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/RepositoryMeteringInformation.ts#L68-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/RepositoryMeteringInformation.ts#L68-L74 type RepositoryLocation struct { BasePath string `json:"base_path"` // Bucket Bucket name (GCP, S3) diff --git a/typedapi/types/repositorymeteringinformation.go b/typedapi/types/repositorymeteringinformation.go old mode 100755 new mode 100644 index 2959b94f70..22ec6b13b0 --- a/typedapi/types/repositorymeteringinformation.go +++ b/typedapi/types/repositorymeteringinformation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RepositoryMeteringInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/RepositoryMeteringInformation.ts#L24-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/RepositoryMeteringInformation.ts#L24-L66 type RepositoryMeteringInformation struct { // Archived A flag that tells whether or not this object has been archived. When a // repository is closed or updated the @@ -58,6 +68,83 @@ type RepositoryMeteringInformation struct { RequestCounts RequestCounts `json:"request_counts"` } +func (s *RepositoryMeteringInformation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "archived": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Archived = value + case bool: + s.Archived = v + } + + case "cluster_version": + if err := dec.Decode(&s.ClusterVersion); err != nil { + return err + } + + case "repository_ephemeral_id": + if err := dec.Decode(&s.RepositoryEphemeralId); err != nil { + return err + } + + case "repository_location": + if err := dec.Decode(&s.RepositoryLocation); err != nil { + return err + } + + case "repository_name": + if err := dec.Decode(&s.RepositoryName); err != nil { + return err + } + + case "repository_started_at": + if err := dec.Decode(&s.RepositoryStartedAt); err != nil { + return err + } + + case "repository_stopped_at": + if err := dec.Decode(&s.RepositoryStoppedAt); err != nil { + return err + } + + case "repository_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RepositoryType = o + + case "request_counts": + if err := dec.Decode(&s.RequestCounts); err != nil { + return err + } + + } + } + return nil +} + // NewRepositoryMeteringInformation returns a RepositoryMeteringInformation. 
func NewRepositoryMeteringInformation() *RepositoryMeteringInformation { r := &RepositoryMeteringInformation{} diff --git a/typedapi/types/repositorysettings.go b/typedapi/types/repositorysettings.go old mode 100755 new mode 100644 index b6bda2e154..ed42e4ed46 --- a/typedapi/types/repositorysettings.go +++ b/typedapi/types/repositorysettings.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // RepositorySettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotRepository.ts#L29-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotRepository.ts#L29-L38 type RepositorySettings struct { ChunkSize *string `json:"chunk_size,omitempty"` Compress string `json:"compress,omitempty"` @@ -31,6 +38,66 @@ type RepositorySettings struct { ReadOnly string `json:"read_only,omitempty"` } +func (s *RepositorySettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "chunk_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ChunkSize = &o + + case "compress": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Compress = o + + case "concurrent_streams": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ConcurrentStreams = o + + case "location": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Location = o + + case "read_only", "readonly": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ReadOnly = o + + } + } + return nil +} + // NewRepositorySettings returns a RepositorySettings. func NewRepositorySettings() *RepositorySettings { r := &RepositorySettings{} diff --git a/typedapi/types/requestcachestats.go b/typedapi/types/requestcachestats.go old mode 100755 new mode 100644 index 3903441b48..c2841a922e --- a/typedapi/types/requestcachestats.go +++ b/typedapi/types/requestcachestats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RequestCacheStats type. 
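The RepositorySettings decoder above also maps two spellings of the same key, "read_only" and "readonly", onto one field; at this revision the string-typed settings capture the raw JSON token, so the sketch below only checks that the field was populated. Settings values are invented:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both spellings of the read-only flag populate the same ReadOnly field.
	for _, raw := range []string{
		`{"location":"/backups","read_only":true}`,
		`{"location":"/backups","readonly":true}`,
	} {
		s := types.NewRepositorySettings()
		if err := json.Unmarshal([]byte(raw), s); err != nil {
			panic(err)
		}
		fmt.Println(s.ReadOnly != "") // true, for either spelling
	}
}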
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L177-L183 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L177-L183 type RequestCacheStats struct { Evictions int64 `json:"evictions"` HitCount int64 `json:"hit_count"` @@ -31,6 +41,94 @@ type RequestCacheStats struct { MissCount int64 `json:"miss_count"` } +func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Evictions = value + case float64: + f := int64(v) + s.Evictions = f + } + + case "hit_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HitCount = value + case float64: + f := int64(v) + s.HitCount = f + } + + case "memory_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MemorySize = &o + + case "memory_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemorySizeInBytes = value + case float64: + f := int64(v) + s.MemorySizeInBytes = f + } + + case "miss_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MissCount = value + case float64: + f := int64(v) + s.MissCount = f + } + + } + } + return nil +} + // NewRequestCacheStats returns a RequestCacheStats. func NewRequestCacheStats() *RequestCacheStats { r := &RequestCacheStats{} diff --git a/typedapi/types/requestcounts.go b/typedapi/types/requestcounts.go old mode 100755 new mode 100644 index 50066492cc..bfab5abcde --- a/typedapi/types/requestcounts.go +++ b/typedapi/types/requestcounts.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RequestCounts type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/RepositoryMeteringInformation.ts#L76-L103 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/RepositoryMeteringInformation.ts#L76-L103 type RequestCounts struct { // GetBlob Number of Get Blob requests (Azure) GetBlob *int64 `json:"GetBlob,omitempty"` @@ -53,6 +63,191 @@ type RequestCounts struct { PutObject *int64 `json:"PutObject,omitempty"` } +func (s *RequestCounts) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "GetBlob": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.GetBlob = &value + case float64: + f := int64(v) + s.GetBlob = &f + } + + case "GetBlobProperties": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.GetBlobProperties = &value + case float64: + f := int64(v) + s.GetBlobProperties = &f + } + + case "GetObject": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.GetObject = &value + case float64: + f := int64(v) + s.GetObject = &f + } + + case "InsertObject": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InsertObject = &value + case float64: + f := int64(v) + s.InsertObject = &f + } + + case "ListBlobs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ListBlobs = &value + case float64: + f := int64(v) + s.ListBlobs = &f + } + + case "ListObjects": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ListObjects = &value + case float64: + f := int64(v) + s.ListObjects = &f + } + + case "PutBlob": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PutBlob = &value + case float64: + f := int64(v) + s.PutBlob = &f + } + + case "PutBlock": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PutBlock = &value + case float64: + f := int64(v) + s.PutBlock = &f + } + + case "PutBlockList": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PutBlockList = &value + case float64: + f := int64(v) + s.PutBlockList = &f + } + + case "PutMultipartObject": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PutMultipartObject = &value + case float64: + f := int64(v) + s.PutMultipartObject = &f + } + + case "PutObject": + var tmp interface{} + dec.Decode(&tmp) + switch v 
:= tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PutObject = &value + case float64: + f := int64(v) + s.PutObject = &f + } + + } + } + return nil +} + // NewRequestCounts returns a RequestCounts. func NewRequestCounts() *RequestCounts { r := &RequestCounts{} diff --git a/typedapi/types/reroutedecision.go b/typedapi/types/reroutedecision.go old mode 100755 new mode 100644 index 8b0450924e..65b7d271c0 --- a/typedapi/types/reroutedecision.go +++ b/typedapi/types/reroutedecision.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RerouteDecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L86-L90 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L86-L90 type RerouteDecision struct { Decider string `json:"decider"` Decision string `json:"decision"` diff --git a/typedapi/types/rerouteexplanation.go b/typedapi/types/rerouteexplanation.go old mode 100755 new mode 100644 index 24049be510..2de73a0f41 --- a/typedapi/types/rerouteexplanation.go +++ b/typedapi/types/rerouteexplanation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RerouteExplanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L92-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L92-L96 type RerouteExplanation struct { Command string `json:"command"` Decisions []RerouteDecision `json:"decisions"` diff --git a/typedapi/types/rerouteparameters.go b/typedapi/types/rerouteparameters.go old mode 100755 new mode 100644 index 35e3fad36e..7ba56139b0 --- a/typedapi/types/rerouteparameters.go +++ b/typedapi/types/rerouteparameters.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RerouteParameters type. 
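Every counter in the RequestCounts decoder above uses the same string-or-number coercion, which matters because the Azure, GCS and S3 repository backends each report a different subset of these fields. A sketch with invented S3-style counts:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// One counter quoted, one numeric; both decode into *int64.
	raw := []byte(`{"GetObject":"120","PutObject":7}`)

	rc := types.NewRequestCounts()
	if err := json.Unmarshal(raw, rc); err != nil {
		panic(err)
	}
	fmt.Println(*rc.GetObject, *rc.PutObject) // 120 7
}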
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/reroute/types.ts#L98-L105 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/reroute/types.ts#L98-L105 type RerouteParameters struct { AllowPrimary bool `json:"allow_primary"` FromNode *string `json:"from_node,omitempty"` @@ -32,6 +42,76 @@ type RerouteParameters struct { ToNode *string `json:"to_node,omitempty"` } +func (s *RerouteParameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_primary": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowPrimary = value + case bool: + s.AllowPrimary = v + } + + case "from_node": + if err := dec.Decode(&s.FromNode); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + case "to_node": + if err := dec.Decode(&s.ToNode); err != nil { + return err + } + + } + } + return nil +} + // NewRerouteParameters returns a RerouteParameters. func NewRerouteParameters() *RerouteParameters { r := &RerouteParameters{} diff --git a/typedapi/types/rescore.go b/typedapi/types/rescore.go old mode 100755 new mode 100644 index 2a839a234b..2e8767faa7 --- a/typedapi/types/rescore.go +++ b/typedapi/types/rescore.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Rescore type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/rescoring.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/rescoring.ts#L23-L26 type Rescore struct { Query RescoreQuery `json:"query"` WindowSize *int `json:"window_size,omitempty"` } +func (s *Rescore) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "window_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.WindowSize = &value + case float64: + f := int(v) + s.WindowSize = &f + } + + } + } + return nil +} + // NewRescore returns a Rescore. 
func NewRescore() *Rescore { r := &Rescore{} diff --git a/typedapi/types/rescorequery.go b/typedapi/types/rescorequery.go old mode 100755 new mode 100644 index 2c0edac2d3..60e632d97f --- a/typedapi/types/rescorequery.go +++ b/typedapi/types/rescorequery.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/scoremode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // RescoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/rescoring.ts#L28-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/rescoring.ts#L28-L34 type RescoreQuery struct { Query Query `json:"rescore_query"` QueryWeight *Float64 `json:"query_weight,omitempty"` @@ -34,6 +42,68 @@ type RescoreQuery struct { ScoreMode *scoremode.ScoreMode `json:"score_mode,omitempty"` } +func (s *RescoreQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "rescore_query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "query_weight": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.QueryWeight = &f + case float64: + f := Float64(v) + s.QueryWeight = &f + } + + case "rescore_query_weight": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.RescoreQueryWeight = &f + case float64: + f := Float64(v) + s.RescoreQueryWeight = &f + } + + case "score_mode": + if err := dec.Decode(&s.ScoreMode); err != nil { + return err + } + + } + } + return nil +} + // NewRescoreQuery returns a RescoreQuery. func NewRescoreQuery() *RescoreQuery { r := &RescoreQuery{} diff --git a/typedapi/types/reservedsize.go b/typedapi/types/reservedsize.go old mode 100755 new mode 100644 index d1545a45f6..ed0d7fb769 --- a/typedapi/types/reservedsize.go +++ b/typedapi/types/reservedsize.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ReservedSize type. 
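The RescoreQuery decoder above reads the Query field from the "rescore_query" key and coerces both weights from strings or numbers into Float64. A sketch with an invented rescore clause (the nested query is assumed to decode through the existing Query type):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "query_weight" is quoted here and is still parsed into a Float64 pointer.
	raw := []byte(`{"rescore_query":{"match_all":{}},"query_weight":"0.7","rescore_query_weight":1.2}`)

	rq := types.NewRescoreQuery()
	if err := json.Unmarshal(raw, rq); err != nil {
		panic(err)
	}
	fmt.Println(*rq.QueryWeight, *rq.RescoreQueryWeight) // 0.7 1.2
}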
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L71-L76 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L71-L76 type ReservedSize struct { NodeId string `json:"node_id"` Path string `json:"path"` @@ -30,6 +40,59 @@ type ReservedSize struct { Total int64 `json:"total"` } +func (s *ReservedSize) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = o + + case "shards": + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewReservedSize returns a ReservedSize. func NewReservedSize() *ReservedSize { r := &ReservedSize{} diff --git a/typedapi/types/resolveindexaliasitem.go b/typedapi/types/resolveindexaliasitem.go old mode 100755 new mode 100644 index 4d6fd9d72a..495eb8cde5 --- a/typedapi/types/resolveindexaliasitem.go +++ b/typedapi/types/resolveindexaliasitem.go @@ -16,18 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ResolveIndexAliasItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/resolve_index/ResolveIndexResponse.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/resolve_index/ResolveIndexResponse.ts#L37-L40 type ResolveIndexAliasItem struct { Indices []string `json:"indices"` Name string `json:"name"` } +func (s *ResolveIndexAliasItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return err + } + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewResolveIndexAliasItem returns a ResolveIndexAliasItem. 
func NewResolveIndexAliasItem() *ResolveIndexAliasItem { r := &ResolveIndexAliasItem{} diff --git a/typedapi/types/resolveindexdatastreamsitem.go b/typedapi/types/resolveindexdatastreamsitem.go old mode 100755 new mode 100644 index bf1bef7351..60e980b922 --- a/typedapi/types/resolveindexdatastreamsitem.go +++ b/typedapi/types/resolveindexdatastreamsitem.go @@ -16,19 +16,73 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ResolveIndexDataStreamsItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/resolve_index/ResolveIndexResponse.ts#L42-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/resolve_index/ResolveIndexResponse.ts#L42-L46 type ResolveIndexDataStreamsItem struct { BackingIndices []string `json:"backing_indices"` Name string `json:"name"` TimestampField string `json:"timestamp_field"` } +func (s *ResolveIndexDataStreamsItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "backing_indices": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.BackingIndices = append(s.BackingIndices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.BackingIndices); err != nil { + return err + } + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "timestamp_field": + if err := dec.Decode(&s.TimestampField); err != nil { + return err + } + + } + } + return nil +} + // NewResolveIndexDataStreamsItem returns a ResolveIndexDataStreamsItem. func NewResolveIndexDataStreamsItem() *ResolveIndexDataStreamsItem { r := &ResolveIndexDataStreamsItem{} diff --git a/typedapi/types/resolveindexitem.go b/typedapi/types/resolveindexitem.go old mode 100755 new mode 100644 index f4322192bf..dc1906073e --- a/typedapi/types/resolveindexitem.go +++ b/typedapi/types/resolveindexitem.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ResolveIndexItem type. 
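In the resourceprivileges.go hunk just below, the alias changes from map[string]map[string]bool to map[string]Privileges. Code that indexes the map should be unaffected provided Privileges is still a map from privilege name to bool, which the old definition suggests but this diff does not show. A hedged sketch with invented resource and privilege names:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Shape of a security has_privileges "index" section; names are invented.
	raw := []byte(`{"logs-2023-01":{"read":true,"write":false}}`)

	var rp types.ResourcePrivileges
	if err := json.Unmarshal(raw, &rp); err != nil {
		panic(err)
	}
	fmt.Println(rp["logs-2023-01"]["read"]) // true, assuming Privileges is a map[string]bool-style type
}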
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/resolve_index/ResolveIndexResponse.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/resolve_index/ResolveIndexResponse.ts#L30-L35 type ResolveIndexItem struct { Aliases []string `json:"aliases,omitempty"` Attributes []string `json:"attributes"` @@ -30,6 +38,46 @@ type ResolveIndexItem struct { Name string `json:"name"` } +func (s *ResolveIndexItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "attributes": + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "data_stream": + if err := dec.Decode(&s.DataStream); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewResolveIndexItem returns a ResolveIndexItem. func NewResolveIndexItem() *ResolveIndexItem { r := &ResolveIndexItem{} diff --git a/typedapi/types/resourceprivileges.go b/typedapi/types/resourceprivileges.go old mode 100755 new mode 100644 index fb0c5a6988..33f6d7c0bd --- a/typedapi/types/resourceprivileges.go +++ b/typedapi/types/resourceprivileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ResourcePrivileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/has_privileges/types.ts#L47-L47 -type ResourcePrivileges map[string]map[string]bool +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/has_privileges/types.ts#L47-L47 +type ResourcePrivileges map[string]Privileges diff --git a/typedapi/types/responsebody.go b/typedapi/types/responsebody.go old mode 100755 new mode 100644 index af9495421c..0ff8fc4eb4 --- a/typedapi/types/responsebody.go +++ b/typedapi/types/responsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // ResponseBody type. 
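The responsebody.go changes that follow initialize the Aggregations map before use and decode each concrete aggregate through &o; from the caller's side the behaviour should stay the same: aggregation keys arrive as "type#name" and are stored under the bare name as typed values. A hedged sketch with an invented aggregation, trimmed to the aggregations section of a search response:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The "sterms#" prefix selects StringTermsAggregate; "tags" is the map key.
	raw := []byte(`{"aggregations":{"sterms#tags":{"buckets":[{"key":"go","doc_count":2}]}}}`)

	var body types.ResponseBody
	if err := json.Unmarshal(raw, &body); err != nil {
		panic(err)
	}
	terms, ok := body.Aggregations["tags"].(*types.StringTermsAggregate)
	fmt.Println(ok, terms != nil) // true true
}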
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/SearchResponse.ts#L38-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/SearchResponse.ts#L38-L54 type ResponseBody struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` @@ -51,6 +53,7 @@ type ResponseBody struct { } func (s *ResponseBody) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -65,6 +68,10 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { switch t { case "aggregations": + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + for dec.More() { tt, err := dec.Token() if err != nil { @@ -77,415 +84,494 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { if strings.Contains(value, "#") { elems := strings.Split(value, "#") if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } switch elems[0] { + case "cardinality": o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentiles": o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "hdr_percentile_ranks": o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentiles": o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "tdigest_percentile_ranks": o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "percentiles_bucket": o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "median_absolute_deviation": o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "min": o := NewMinAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "max": o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sum": o := NewSumAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "avg": o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "weighted_avg": o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "value_count": o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_value": o := NewSimpleValueAggregate() - if err := 
dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "derivative": o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "bucket_metric_value": o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats": o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "stats_bucket": o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats": o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "extended_stats_bucket": o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_bounds": o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_centroid": o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "histogram": o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_histogram": o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "auto_date_histogram": o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "variable_width_histogram": o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sterms": o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lterms": o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "dterms": o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umterms": o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "lrareterms": o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "srareterms": o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umrareterms": o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "multi_terms": o := 
NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "missing": o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "nested": o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "reverse_nested": o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "global": o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filter": o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "children": o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "parent": o := NewParentAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "sampler": o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "unmapped_sampler": o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohash_grid": o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geotile_grid": o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geohex_grid": o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "range": o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "date_range": o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_distance": o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_range": o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "ip_prefix": o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "filters": o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "adjacency_matrix": o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "siglterms": o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != 
nil { return err } s.Aggregations[elems[1]] = o + case "sigsterms": o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "umsigterms": o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "composite": o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "scripted_metric": o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_hits": o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "inference": o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "string_stats": o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "box_plot": o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "top_metrics": o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "t_test": o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "rate": o := NewRateAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "simple_long_value": o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "matrix_stats": o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + case "geo_line": o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { + if err := dec.Decode(&o); err != nil { return err } s.Aggregations[elems[1]] = o + default: o := make(map[string]interface{}, 0) if err := dec.Decode(&o); err != nil { @@ -512,6 +598,9 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Fields); err != nil { return err } @@ -522,13 +611,34 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "max_score": - if err := dec.Decode(&s.MaxScore); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MaxScore = &f + case float64: + f := Float64(v) + s.MaxScore = &f } case "num_reduce_phases": - if err := dec.Decode(&s.NumReducePhases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := 
tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumReducePhases = &value + case float64: + f := int64(v) + s.NumReducePhases = &f } case "pit_id": @@ -552,23 +662,54 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "suggest": + if s.Suggest == nil { + s.Suggest = make(map[string][]Suggest, 0) + } if err := dec.Decode(&s.Suggest); err != nil { return err } case "terminated_early": - if err := dec.Decode(&s.TerminatedEarly); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TerminatedEarly = &value + case bool: + s.TerminatedEarly = &v } case "timed_out": - if err := dec.Decode(&s.TimedOut); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = value + case bool: + s.TimedOut = v } case "took": - if err := dec.Decode(&s.Took); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = value + case float64: + f := int64(v) + s.Took = f } } diff --git a/typedapi/types/responseitem.go b/typedapi/types/responseitem.go old mode 100755 new mode 100644 index ee7ec83eef..e1195f167b --- a/typedapi/types/responseitem.go +++ b/typedapi/types/responseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // GetResult // MultiGetError // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mget/types.ts#L57-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mget/types.ts#L57-L60 type ResponseItem interface{} diff --git a/typedapi/types/retention.go b/typedapi/types/retention.go old mode 100755 new mode 100644 index 479d0c22b0..3ef1a450fa --- a/typedapi/types/retention.go +++ b/typedapi/types/retention.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Retention type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L84-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L84-L97 type Retention struct { // ExpireAfter Time period after which a snapshot is considered expired and eligible for // deletion. SLM deletes expired snapshots based on the slm.retention_schedule. 
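Editor's note: the hunks above replace direct dec.Decode calls for numeric response fields such as took and num_reduce_phases with a tolerant decode: the value is first read into an interface{} and then converted, so both a JSON number and a quoted string are accepted. The generated code does this inside a streaming json.Decoder token loop; the following is only a minimal, self-contained sketch of the same idea using a plain map, with illustrative names that are not part of the generated API.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// tookOnly mirrors the tolerant decoders above for a single int64 field;
// every other key in the payload is ignored.
type tookOnly struct {
	Took int64
}

func (s *tookOnly) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["took"]; ok {
		var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string: // e.g. "took": "42"
			value, err := strconv.ParseInt(v, 10, 64)
			if err != nil {
				return err
			}
			s.Took = value
		case float64: // e.g. "took": 42 (encoding/json decodes numbers as float64)
			s.Took = int64(v)
		}
	}
	return nil
}

func main() {
	for _, in := range []string{`{"took": 42}`, `{"took": "42"}`} {
		var t tookOnly
		if err := json.Unmarshal([]byte(in), &t); err != nil {
			panic(err)
		}
		fmt.Println(t.Took) // 42 in both cases
	}
}

The hunks above apply the same pattern with strconv.ParseFloat for max_score and strconv.ParseBool for terminated_early and timed_out.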
@@ -35,6 +45,63 @@ type Retention struct { MinCount int `json:"min_count"` } +func (s *Retention) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expire_after": + if err := dec.Decode(&s.ExpireAfter); err != nil { + return err + } + + case "max_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxCount = value + case float64: + f := int(v) + s.MaxCount = f + } + + case "min_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinCount = value + case float64: + f := int(v) + s.MinCount = f + } + + } + } + return nil +} + // NewRetention returns a Retention. func NewRetention() *Retention { r := &Retention{} diff --git a/typedapi/types/retentionlease.go b/typedapi/types/retentionlease.go old mode 100755 new mode 100644 index b7ce862927..b7f355b0ed --- a/typedapi/types/retentionlease.go +++ b/typedapi/types/retentionlease.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RetentionLease type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L65-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L65-L67 type RetentionLease struct { Period Duration `json:"period"` } +func (s *RetentionLease) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "period": + if err := dec.Decode(&s.Period); err != nil { + return err + } + + } + } + return nil +} + // NewRetentionLease returns a RetentionLease. func NewRetentionLease() *RetentionLease { r := &RetentionLease{} diff --git a/typedapi/types/retentionpolicy.go b/typedapi/types/retentionpolicy.go old mode 100755 new mode 100644 index 11347eb1a0..96c7dfa33e --- a/typedapi/types/retentionpolicy.go +++ b/typedapi/types/retentionpolicy.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RetentionPolicy type. 
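Editor's note: Retention gains the same kind of tolerant decoder, so max_count and min_count can arrive either as JSON numbers or as quoted strings. A small usage sketch, assuming the types package at the import path used elsewhere in this client (github.com/elastic/go-elasticsearch/v8/typedapi/types); the payload values are made up.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Both payloads should decode to the same struct values thanks to the
	// custom UnmarshalJSON added in the hunk above.
	payloads := []string{
		`{"expire_after": "30d", "max_count": 50, "min_count": 5}`,
		`{"expire_after": "30d", "max_count": "50", "min_count": "5"}`,
	}
	for _, p := range payloads {
		r := types.NewRetention()
		if err := json.Unmarshal([]byte(p), r); err != nil {
			panic(err)
		}
		fmt.Println(r.MaxCount, r.MinCount) // 50 5
	}
}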
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L88-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L88-L96 type RetentionPolicy struct { // Field The date field that is used to calculate the age of the document. Field string `json:"field"` @@ -32,6 +40,36 @@ type RetentionPolicy struct { MaxAge Duration `json:"max_age"` } +func (s *RetentionPolicy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "max_age": + if err := dec.Decode(&s.MaxAge); err != nil { + return err + } + + } + } + return nil +} + // NewRetentionPolicy returns a RetentionPolicy. func NewRetentionPolicy() *RetentionPolicy { r := &RetentionPolicy{} diff --git a/typedapi/types/retentionpolicycontainer.go b/typedapi/types/retentionpolicycontainer.go old mode 100755 new mode 100644 index 0c69a4e012..9781e52e50 --- a/typedapi/types/retentionpolicycontainer.go +++ b/typedapi/types/retentionpolicycontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RetentionPolicyContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L80-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L80-L86 type RetentionPolicyContainer struct { // Time Specifies that the transform uses a time field to set the retention policy. Time *RetentionPolicy `json:"time,omitempty"` diff --git a/typedapi/types/retries.go b/typedapi/types/retries.go old mode 100755 new mode 100644 index 9c3c563650..b49e272a30 --- a/typedapi/types/retries.go +++ b/typedapi/types/retries.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Retries type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Retries.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Retries.ts#L22-L25 type Retries struct { Bulk int64 `json:"bulk"` Search int64 `json:"search"` } +func (s *Retries) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bulk": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Bulk = value + case float64: + f := int64(v) + s.Bulk = f + } + + case "search": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Search = value + case float64: + f := int64(v) + s.Search = f + } + + } + } + return nil +} + // NewRetries returns a Retries. func NewRetries() *Retries { r := &Retries{} diff --git a/typedapi/types/reversenestedaggregate.go b/typedapi/types/reversenestedaggregate.go old mode 100755 new mode 100644 index 5568a817fe..43b799b64a --- a/typedapi/types/reversenestedaggregate.go +++ b/typedapi/types/reversenestedaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // ReverseNestedAggregate type. 
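Editor's note: the ResponseBody decoder above and the ReverseNestedAggregate decoder that follows both pick a concrete aggregate by splitting the JSON key on "#". With typed_keys responses, Elasticsearch names each aggregation as "<type>#<name>", e.g. "sterms#my_terms". Below is a simplified, self-contained sketch of that dispatch; the generated code streams tokens with json.Decoder and covers every aggregate kind, while this sketch uses a raw map and two stand-in types.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Stand-in aggregate shapes; the generated code has one concrete struct per
// aggregate kind (StringTermsAggregate, MaxAggregate, ...).
type stringTerms struct {
	Buckets []struct {
		Key      string `json:"key"`
		DocCount int64  `json:"doc_count"`
	} `json:"buckets"`
}

type maxAgg struct {
	Value float64 `json:"value"`
}

func decodeAggregations(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{}, len(raw))
	for key, msg := range raw {
		name := key
		var target interface{} = &map[string]interface{}{} // unknown kinds stay generic
		if elems := strings.Split(key, "#"); len(elems) == 2 {
			name = elems[1]
			switch elems[0] { // the "<type>" half of "<type>#<name>"
			case "sterms":
				target = &stringTerms{}
			case "max":
				target = &maxAgg{}
			}
		}
		if err := json.Unmarshal(msg, target); err != nil {
			return nil, err
		}
		out[name] = target
	}
	return out, nil
}

func main() {
	raw := map[string]json.RawMessage{
		"sterms#tags": json.RawMessage(`{"buckets":[{"key":"go","doc_count":3}]}`),
		"max#price":   json.RawMessage(`{"value":19.99}`),
	}
	aggs, err := decodeAggregations(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", aggs["tags"])
	fmt.Printf("%#v\n", aggs["price"])
}

As in the generated decoders, keys the switch does not recognise fall back to a plain map[string]interface{} instead of failing.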
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L488-L489 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L489-L490 type ReverseNestedAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - 
if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() 
- if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() 
+ if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return 
err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": 
+ o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s ReverseNestedAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/reversenestedaggregation.go b/typedapi/types/reversenestedaggregation.go old mode 100755 new mode 100644 index 24caac1b2d..dbe129379b --- a/typedapi/types/reversenestedaggregation.go +++ b/typedapi/types/reversenestedaggregation.go @@ -16,21 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ReverseNestedAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L314-L316 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L314-L316 type ReverseNestedAggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Path *string `json:"path,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` +} + +func (s *ReverseNestedAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "path": + if err := dec.Decode(&s.Path); err != nil { + return err + } + + } + } + return nil } // NewReverseNestedAggregation returns a ReverseNestedAggregation. diff --git a/typedapi/types/reversetokenfilter.go b/typedapi/types/reversetokenfilter.go old mode 100755 new mode 100644 index faefae27d9..2511dc410f --- a/typedapi/types/reversetokenfilter.go +++ b/typedapi/types/reversetokenfilter.go @@ -16,18 +16,56 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ReverseTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L304-L306 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L305-L307 type ReverseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *ReverseTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewReverseTokenFilter returns a ReverseTokenFilter. func NewReverseTokenFilter() *ReverseTokenFilter { r := &ReverseTokenFilter{} diff --git a/typedapi/types/role.go b/typedapi/types/role.go old mode 100755 new mode 100644 index 47b2949da4..80122d1e52 --- a/typedapi/types/role.go +++ b/typedapi/types/role.go @@ -16,28 +16,95 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // Role type. 
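Editor's note: several of the rewritten decoders above now allocate their Aggregations map before storing into it (if s.Aggregations == nil { s.Aggregations = make(map[string]Aggregate, 0) }). The guard matters because sub-aggregates are assigned directly with s.Aggregations[name] = o, and assigning into a nil map panics at runtime. A tiny illustration of the guard, using a stand-in type:

package main

import "fmt"

type aggregate interface{}

type container struct {
	Aggregations map[string]aggregate
}

func (c *container) add(name string, agg aggregate) {
	// Without this guard, the assignment below would panic with
	// "assignment to entry in nil map" whenever c.Aggregations is nil,
	// which is exactly the state of a freshly constructed struct.
	if c.Aggregations == nil {
		c.Aggregations = make(map[string]aggregate)
	}
	c.Aggregations[name] = agg
}

func main() {
	var c container
	c.add("tags", map[string]interface{}{"buckets": []string{"go"}})
	fmt.Println(len(c.Aggregations)) // 1
}

The same hunks also switch every dec.Decode(o) call to dec.Decode(&o), and the ReverseNestedAggregate marshaller now removes the stray "Aggregations" key from its temporary map before encoding.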
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_role/types.ts#L29-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_role/types.ts#L29-L39 type Role struct { Applications []ApplicationPrivileges `json:"applications"` Cluster []string `json:"cluster"` Global map[string]map[string]map[string][]string `json:"global,omitempty"` Indices []IndicesPrivileges `json:"indices"` - Metadata map[string]json.RawMessage `json:"metadata"` + Metadata Metadata `json:"metadata"` RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` RunAs []string `json:"run_as"` TransientMetadata TransientMetadataConfig `json:"transient_metadata"` } +func (s *Role) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "applications": + if err := dec.Decode(&s.Applications); err != nil { + return err + } + + case "cluster": + if err := dec.Decode(&s.Cluster); err != nil { + return err + } + + case "global": + if s.Global == nil { + s.Global = make(map[string]map[string]map[string][]string, 0) + } + if err := dec.Decode(&s.Global); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "role_templates": + if err := dec.Decode(&s.RoleTemplates); err != nil { + return err + } + + case "run_as": + if err := dec.Decode(&s.RunAs); err != nil { + return err + } + + case "transient_metadata": + if err := dec.Decode(&s.TransientMetadata); err != nil { + return err + } + + } + } + return nil +} + // NewRole returns a Role. func NewRole() *Role { r := &Role{ diff --git a/typedapi/types/roledescriptor.go b/typedapi/types/roledescriptor.go old mode 100755 new mode 100644 index eb4f71f9b3..4e27f38953 --- a/typedapi/types/roledescriptor.go +++ b/typedapi/types/roledescriptor.go @@ -16,25 +16,95 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // RoleDescriptor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RoleDescriptor.ts#L27-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RoleDescriptor.ts#L27-L36 type RoleDescriptor struct { - Applications []ApplicationPrivileges `json:"applications,omitempty"` - Cluster []string `json:"cluster,omitempty"` - Global []GlobalPrivilege `json:"global,omitempty"` - Indices []IndicesPrivileges `json:"indices,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - RunAs []string `json:"run_as,omitempty"` - TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` + Applications []ApplicationPrivileges `json:"applications,omitempty"` + Cluster []string `json:"cluster,omitempty"` + Global []GlobalPrivilege `json:"global,omitempty"` + Indices []IndicesPrivileges `json:"indices,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + RunAs []string `json:"run_as,omitempty"` + TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` +} + +func (s *RoleDescriptor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "applications": + if err := dec.Decode(&s.Applications); err != nil { + return err + } + + case "cluster": + if err := dec.Decode(&s.Cluster); err != nil { + return err + } + + case "global": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewGlobalPrivilege() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Global = append(s.Global, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Global); err != nil { + return err + } + } + + case "indices", "index": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "run_as": + if err := dec.Decode(&s.RunAs); err != nil { + return err + } + + case "transient_metadata": + if err := dec.Decode(&s.TransientMetadata); err != nil { + return err + } + + } + } + return nil } // NewRoleDescriptor returns a RoleDescriptor. diff --git a/typedapi/types/roledescriptorread.go b/typedapi/types/roledescriptorread.go old mode 100755 new mode 100644 index 39ad9a9c80..f4da9676e8 --- a/typedapi/types/roledescriptorread.go +++ b/typedapi/types/roledescriptorread.go @@ -16,25 +16,95 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // RoleDescriptorRead type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RoleDescriptor.ts#L38-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RoleDescriptor.ts#L38-L47 type RoleDescriptorRead struct { - Applications []ApplicationPrivileges `json:"applications,omitempty"` - Cluster []string `json:"cluster"` - Global []GlobalPrivilege `json:"global,omitempty"` - Indices []IndicesPrivileges `json:"indices"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - RunAs []string `json:"run_as,omitempty"` - TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` + Applications []ApplicationPrivileges `json:"applications,omitempty"` + Cluster []string `json:"cluster"` + Global []GlobalPrivilege `json:"global,omitempty"` + Indices []IndicesPrivileges `json:"indices"` + Metadata Metadata `json:"metadata,omitempty"` + RunAs []string `json:"run_as,omitempty"` + TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` +} + +func (s *RoleDescriptorRead) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "applications": + if err := dec.Decode(&s.Applications); err != nil { + return err + } + + case "cluster": + if err := dec.Decode(&s.Cluster); err != nil { + return err + } + + case "global": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewGlobalPrivilege() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Global = append(s.Global, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Global); err != nil { + return err + } + } + + case "indices", "index": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "run_as": + if err := dec.Decode(&s.RunAs); err != nil { + return err + } + + case "transient_metadata": + if err := dec.Decode(&s.TransientMetadata); err != nil { + return err + } + + } + } + return nil } // NewRoleDescriptorRead returns a RoleDescriptorRead. diff --git a/typedapi/types/roledescriptorwrapper.go b/typedapi/types/roledescriptorwrapper.go old mode 100755 new mode 100644 index 67599ea0db..940fcde033 --- a/typedapi/types/roledescriptorwrapper.go +++ b/typedapi/types/roledescriptorwrapper.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RoleDescriptorWrapper type. 
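Editor's note: the RoleDescriptor and RoleDescriptorRead decoders above accept the global field either as a single object or as an array: the raw message is checked with bytes.HasPrefix(rawMsg, []byte("[")), and a lone object is wrapped into a one-element slice. They also accept "index" as an alias for the "indices" key. A reduced sketch of the object-or-array handling, with a stand-in element type in place of the generated GlobalPrivilege:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// privilege is a stand-in for the generated GlobalPrivilege type.
type privilege struct {
	Application map[string][]string `json:"application,omitempty"`
}

type descriptor struct {
	Global []privilege `json:"global,omitempty"`
}

func (d *descriptor) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["global"]; ok {
		if !bytes.HasPrefix(msg, []byte("[")) {
			// A single object: decode it alone and wrap it in a slice.
			var p privilege
			if err := json.Unmarshal(msg, &p); err != nil {
				return err
			}
			d.Global = append(d.Global, p)
		} else {
			// Already an array: decode directly into the slice.
			if err := json.Unmarshal(msg, &d.Global); err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	single := `{"global": {"application": {"manage": ["app-*"]}}}`
	list := `{"global": [{"application": {"manage": ["app-*"]}}]}`
	for _, in := range []string{single, list} {
		var d descriptor
		if err := json.Unmarshal([]byte(in), &d); err != nil {
			panic(err)
		}
		fmt.Println(len(d.Global)) // 1 in both cases
	}
}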
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_service_accounts/types.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_service_accounts/types.ts#L22-L24 type RoleDescriptorWrapper struct { RoleDescriptor RoleDescriptorRead `json:"role_descriptor"` } diff --git a/typedapi/types/rolemappingrule.go b/typedapi/types/rolemappingrule.go old mode 100755 new mode 100644 index 7fa431935d..44ee24a7a5 --- a/typedapi/types/rolemappingrule.go +++ b/typedapi/types/rolemappingrule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RoleMappingRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RoleMappingRule.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RoleMappingRule.ts#L23-L31 type RoleMappingRule struct { All []RoleMappingRule `json:"all,omitempty"` Any []RoleMappingRule `json:"any,omitempty"` diff --git a/typedapi/types/roletemplate.go b/typedapi/types/roletemplate.go old mode 100755 new mode 100644 index 4108aafc4c..b0e22ddaf0 --- a/typedapi/types/roletemplate.go +++ b/typedapi/types/roletemplate.go @@ -16,22 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/templateformat" + + "bytes" + "errors" + "io" + + "encoding/json" ) // RoleTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_role/types.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_role/types.ts#L47-L50 type RoleTemplate struct { Format *templateformat.TemplateFormat `json:"format,omitempty"` Template Script `json:"template"` } +func (s *RoleTemplate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "format": + if err := dec.Decode(&s.Format); err != nil { + return err + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + } + } + return nil +} + // NewRoleTemplate returns a RoleTemplate. func NewRoleTemplate() *RoleTemplate { r := &RoleTemplate{} diff --git a/typedapi/types/roletemplateinlinequery.go b/typedapi/types/roletemplateinlinequery.go old mode 100755 new mode 100644 index 2396bf3814..1379400222 --- a/typedapi/types/roletemplateinlinequery.go +++ b/typedapi/types/roletemplateinlinequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // Query // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L159-L160 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L160-L161 type RoleTemplateInlineQuery interface{} diff --git a/typedapi/types/roletemplateinlinescript.go b/typedapi/types/roletemplateinlinescript.go old mode 100755 new mode 100644 index 1409adb754..158411c570 --- a/typedapi/types/roletemplateinlinescript.go +++ b/typedapi/types/roletemplateinlinescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,7 +32,7 @@ import ( // RoleTemplateInlineScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L152-L157 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L153-L158 type RoleTemplateInlineScript struct { Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` Options map[string]string `json:"options,omitempty"` @@ -41,6 +41,12 @@ type RoleTemplateInlineScript struct { } func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Source) + return err + } + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -60,16 +66,23 @@ func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error { } case "options": + if s.Options == nil { + s.Options = make(map[string]string, 0) + } if err := dec.Decode(&s.Options); err != nil { return err } case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } if err := dec.Decode(&s.Params); err != nil { return err } case "source": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) diff --git a/typedapi/types/roletemplatequery.go b/typedapi/types/roletemplatequery.go old mode 100755 new mode 100644 index 6d36cebe6b..6f3e9e0365 --- a/typedapi/types/roletemplatequery.go +++ b/typedapi/types/roletemplatequery.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RoleTemplateQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L140-L150 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L141-L151 type RoleTemplateQuery struct { // Template When you create a role, you can specify a query that defines the document // level security permissions. You can optionally @@ -35,6 +43,31 @@ type RoleTemplateQuery struct { Template RoleTemplateScript `json:"template,omitempty"` } +func (s *RoleTemplateQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + } + } + return nil +} + // NewRoleTemplateQuery returns a RoleTemplateQuery. func NewRoleTemplateQuery() *RoleTemplateQuery { r := &RoleTemplateQuery{} diff --git a/typedapi/types/roletemplatescript.go b/typedapi/types/roletemplatescript.go old mode 100755 new mode 100644 index 8ab4c91e7a..0b77d29fa5 --- a/typedapi/types/roletemplatescript.go +++ b/typedapi/types/roletemplatescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // RoleTemplateInlineScript // StoredScriptId // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L162-L163 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L163-L164 type RoleTemplateScript interface{} diff --git a/typedapi/types/rolloverconditions.go b/typedapi/types/rolloverconditions.go old mode 100755 new mode 100644 index 1179759b74..2918a3a1fd --- a/typedapi/types/rolloverconditions.go +++ b/typedapi/types/rolloverconditions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RolloverConditions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/rollover/types.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/rollover/types.ts#L24-L40 type RolloverConditions struct { MaxAge Duration `json:"max_age,omitempty"` MaxAgeMillis *int64 `json:"max_age_millis,omitempty"` @@ -41,6 +51,181 @@ type RolloverConditions struct { MinSizeBytes *int64 `json:"min_size_bytes,omitempty"` } +func (s *RolloverConditions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_age": + if err := dec.Decode(&s.MaxAge); err != nil { + return err + } + + case "max_age_millis": + if err := dec.Decode(&s.MaxAgeMillis); err != nil { + return err + } + + case "max_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxDocs = &value + case float64: + f := int64(v) + s.MaxDocs = &f + } + + case "max_primary_shard_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxPrimaryShardDocs = &value + case float64: + f := int64(v) + s.MaxPrimaryShardDocs = &f + } + + case "max_primary_shard_size": + if err := dec.Decode(&s.MaxPrimaryShardSize); err != nil { + return err + } + + case "max_primary_shard_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxPrimaryShardSizeBytes = &value + case float64: + f := int64(v) + s.MaxPrimaryShardSizeBytes = &f + } + + case "max_size": + if err := dec.Decode(&s.MaxSize); err != nil { + return err + } + + case "max_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxSizeBytes = &value + case float64: + f := int64(v) + s.MaxSizeBytes = &f + } + + case "min_age": + if err := dec.Decode(&s.MinAge); err != nil { + return err + } + + case "min_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinDocs = &value + case float64: + f := int64(v) + s.MinDocs = &f + } + + case "min_primary_shard_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinPrimaryShardDocs = &value + case float64: + f := int64(v) + s.MinPrimaryShardDocs = &f + } + + case "min_primary_shard_size": + if err := dec.Decode(&s.MinPrimaryShardSize); err != nil { + return err + } + + case "min_primary_shard_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinPrimaryShardSizeBytes = &value + case float64: + f := int64(v) + s.MinPrimaryShardSizeBytes = &f + } + + case "min_size": + if err := dec.Decode(&s.MinSize); err != nil { + return err + } + + case "min_size_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + 
case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinSizeBytes = &value + case float64: + f := int64(v) + s.MinSizeBytes = &f + } + + } + } + return nil +} + // NewRolloverConditions returns a RolloverConditions. func NewRolloverConditions() *RolloverConditions { r := &RolloverConditions{} diff --git a/typedapi/types/rollupcapabilities.go b/typedapi/types/rollupcapabilities.go old mode 100755 new mode 100644 index b005354bd1..2762f0cfcc --- a/typedapi/types/rollupcapabilities.go +++ b/typedapi/types/rollupcapabilities.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RollupCapabilities type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_caps/types.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_caps/types.ts#L25-L27 type RollupCapabilities struct { RollupJobs []RollupCapabilitySummary `json:"rollup_jobs"` } diff --git a/typedapi/types/rollupcapabilitysummary.go b/typedapi/types/rollupcapabilitysummary.go old mode 100755 new mode 100644 index 7611f212c2..1b3f75ec9a --- a/typedapi/types/rollupcapabilitysummary.go +++ b/typedapi/types/rollupcapabilitysummary.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RollupCapabilitySummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_caps/types.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_caps/types.ts#L29-L34 type RollupCapabilitySummary struct { Fields map[string][]RollupFieldSummary `json:"fields"` IndexPattern string `json:"index_pattern"` @@ -30,6 +38,58 @@ type RollupCapabilitySummary struct { RollupIndex string `json:"rollup_index"` } +func (s *RollupCapabilitySummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string][]RollupFieldSummary, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "index_pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexPattern = o + + case "job_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.JobId = o + + case "rollup_index": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RollupIndex = o + + } + } + return nil +} + // NewRollupCapabilitySummary returns a RollupCapabilitySummary. 
func NewRollupCapabilitySummary() *RollupCapabilitySummary { r := &RollupCapabilitySummary{ diff --git a/typedapi/types/rollupfieldsummary.go b/typedapi/types/rollupfieldsummary.go old mode 100755 new mode 100644 index f7cd91f1e0..56cb82aab8 --- a/typedapi/types/rollupfieldsummary.go +++ b/typedapi/types/rollupfieldsummary.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RollupFieldSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_caps/types.ts#L36-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_caps/types.ts#L36-L40 type RollupFieldSummary struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` TimeZone *string `json:"time_zone,omitempty"` } +func (s *RollupFieldSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "agg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Agg = o + + case "calendar_interval": + if err := dec.Decode(&s.CalendarInterval); err != nil { + return err + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + } + } + return nil +} + // NewRollupFieldSummary returns a RollupFieldSummary. func NewRollupFieldSummary() *RollupFieldSummary { r := &RollupFieldSummary{} diff --git a/typedapi/types/rollupjob.go b/typedapi/types/rollupjob.go old mode 100755 new mode 100644 index f2a7371ee4..23508607fb --- a/typedapi/types/rollupjob.go +++ b/typedapi/types/rollupjob.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RollupJob type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_jobs/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_jobs/types.ts#L28-L32 type RollupJob struct { Config RollupJobConfiguration `json:"config"` Stats RollupJobStats `json:"stats"` diff --git a/typedapi/types/rollupjobconfiguration.go b/typedapi/types/rollupjobconfiguration.go old mode 100755 new mode 100644 index fef0b8ba5b..c54d487f60 --- a/typedapi/types/rollupjobconfiguration.go +++ b/typedapi/types/rollupjobconfiguration.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RollupJobConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_jobs/types.ts#L34-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_jobs/types.ts#L34-L43 type RollupJobConfiguration struct { Cron string `json:"cron"` Groups Groupings `json:"groups"` @@ -34,6 +44,82 @@ type RollupJobConfiguration struct { Timeout Duration `json:"timeout"` } +func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cron": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Cron = o + + case "groups": + if err := dec.Decode(&s.Groups); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index_pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexPattern = o + + case "metrics": + if err := dec.Decode(&s.Metrics); err != nil { + return err + } + + case "page_size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PageSize = value + case float64: + f := int64(v) + s.PageSize = f + } + + case "rollup_index": + if err := dec.Decode(&s.RollupIndex); err != nil { + return err + } + + case "timeout": + if err := dec.Decode(&s.Timeout); err != nil { + return err + } + + } + } + return nil +} + // NewRollupJobConfiguration returns a RollupJobConfiguration. func NewRollupJobConfiguration() *RollupJobConfiguration { r := &RollupJobConfiguration{} diff --git a/typedapi/types/rollupjobstats.go b/typedapi/types/rollupjobstats.go old mode 100755 new mode 100644 index f6b45b94ab..ad4e5eb4c5 --- a/typedapi/types/rollupjobstats.go +++ b/typedapi/types/rollupjobstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RollupJobStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_jobs/types.ts#L45-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_jobs/types.ts#L45-L58 type RollupJobStats struct { DocumentsProcessed int64 `json:"documents_processed"` IndexFailures int64 `json:"index_failures"` @@ -38,6 +48,176 @@ type RollupJobStats struct { TriggerCount int64 `json:"trigger_count"` } +func (s *RollupJobStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "documents_processed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocumentsProcessed = value + case float64: + f := int64(v) + s.DocumentsProcessed = f + } + + case "index_failures": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexFailures = value + case float64: + f := int64(v) + s.IndexFailures = f + } + + case "index_time_in_ms": + if err := dec.Decode(&s.IndexTimeInMs); err != nil { + return err + } + + case "index_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexTotal = value + case float64: + f := int64(v) + s.IndexTotal = f + } + + case "pages_processed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PagesProcessed = value + case float64: + f := int64(v) + s.PagesProcessed = f + } + + case "processing_time_in_ms": + if err := dec.Decode(&s.ProcessingTimeInMs); err != nil { + return err + } + + case "processing_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ProcessingTotal = value + case float64: + f := int64(v) + s.ProcessingTotal = f + } + + case "rollups_indexed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RollupsIndexed = value + case float64: + f := int64(v) + s.RollupsIndexed = f + } + + case "search_failures": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SearchFailures = value + case float64: + f := int64(v) + s.SearchFailures = f + } + + case "search_time_in_ms": + if err := dec.Decode(&s.SearchTimeInMs); err != nil { + return err + } + + case "search_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SearchTotal = value + case float64: + f := int64(v) + s.SearchTotal = f + } + + case "trigger_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TriggerCount = value + case float64: + f := int64(v) + s.TriggerCount = f + } + + } 
+ } + return nil +} + // NewRollupJobStats returns a RollupJobStats. func NewRollupJobStats() *RollupJobStats { r := &RollupJobStats{} diff --git a/typedapi/types/rollupjobstatus.go b/typedapi/types/rollupjobstatus.go old mode 100755 new mode 100644 index 1fa79f0407..fda471340f --- a/typedapi/types/rollupjobstatus.go +++ b/typedapi/types/rollupjobstatus.go @@ -16,25 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexingjobstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // RollupJobStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_jobs/types.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_jobs/types.ts#L60-L64 type RollupJobStatus struct { CurrentPosition map[string]json.RawMessage `json:"current_position,omitempty"` JobState indexingjobstate.IndexingJobState `json:"job_state"` UpgradedDocId *bool `json:"upgraded_doc_id,omitempty"` } +func (s *RollupJobStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current_position": + if s.CurrentPosition == nil { + s.CurrentPosition = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.CurrentPosition); err != nil { + return err + } + + case "job_state": + if err := dec.Decode(&s.JobState); err != nil { + return err + } + + case "upgraded_doc_id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UpgradedDocId = &value + case bool: + s.UpgradedDocId = &v + } + + } + } + return nil +} + // NewRollupJobStatus returns a RollupJobStatus. func NewRollupJobStatus() *RollupJobStatus { r := &RollupJobStatus{ diff --git a/typedapi/types/rollupjobsummary.go b/typedapi/types/rollupjobsummary.go old mode 100755 new mode 100644 index 946e9cfa85..9ca754351a --- a/typedapi/types/rollupjobsummary.go +++ b/typedapi/types/rollupjobsummary.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RollupJobSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_index_caps/types.ts#L28-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_index_caps/types.ts#L28-L33 type RollupJobSummary struct { Fields map[string][]RollupJobSummaryField `json:"fields"` IndexPattern string `json:"index_pattern"` @@ -30,6 +38,52 @@ type RollupJobSummary struct { RollupIndex string `json:"rollup_index"` } +func (s *RollupJobSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string][]RollupJobSummaryField, 0) + } + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "index_pattern": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexPattern = o + + case "job_id": + if err := dec.Decode(&s.JobId); err != nil { + return err + } + + case "rollup_index": + if err := dec.Decode(&s.RollupIndex); err != nil { + return err + } + + } + } + return nil +} + // NewRollupJobSummary returns a RollupJobSummary. func NewRollupJobSummary() *RollupJobSummary { r := &RollupJobSummary{ diff --git a/typedapi/types/rollupjobsummaryfield.go b/typedapi/types/rollupjobsummaryfield.go old mode 100755 new mode 100644 index 7179b80e69..0b0f30fcf7 --- a/typedapi/types/rollupjobsummaryfield.go +++ b/typedapi/types/rollupjobsummaryfield.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RollupJobSummaryField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/get_rollup_index_caps/types.ts#L35-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/get_rollup_index_caps/types.ts#L35-L39 type RollupJobSummaryField struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` TimeZone *string `json:"time_zone,omitempty"` } +func (s *RollupJobSummaryField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "agg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Agg = o + + case "calendar_interval": + if err := dec.Decode(&s.CalendarInterval); err != nil { + return err + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return err + } + + } + } + return nil +} + // NewRollupJobSummaryField returns a RollupJobSummaryField. 
func NewRollupJobSummaryField() *RollupJobSummaryField { r := &RollupJobSummaryField{} diff --git a/typedapi/types/routingfield.go b/typedapi/types/routingfield.go old mode 100755 new mode 100644 index a4709ee329..fb0d902cdb --- a/typedapi/types/routingfield.go +++ b/typedapi/types/routingfield.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RoutingField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L50-L52 type RoutingField struct { Required bool `json:"required"` } +func (s *RoutingField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "required": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Required = value + case bool: + s.Required = v + } + + } + } + return nil +} + // NewRoutingField returns a RoutingField. func NewRoutingField() *RoutingField { r := &RoutingField{} diff --git a/typedapi/types/row.go b/typedapi/types/row.go old mode 100755 new mode 100644 index 30814c00c9..32d6a04e94 --- a/typedapi/types/row.go +++ b/typedapi/types/row.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,5 +24,5 @@ import "encoding/json" // Row type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/sql/types.ts#L28-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/sql/types.ts#L28-L28 type Row []json.RawMessage diff --git a/typedapi/types/rulecondition.go b/typedapi/types/rulecondition.go old mode 100755 new mode 100644 index 7eab564073..0138f74533 --- a/typedapi/types/rulecondition.go +++ b/typedapi/types/rulecondition.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/appliesto" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/conditionoperator" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // RuleCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Rule.ts#L52-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Rule.ts#L52-L65 type RuleCondition struct { // AppliesTo Specifies the result property to which the condition applies. If your // detector uses `lat_long`, `metric`, `rare`, or `freq_rare` functions, you can @@ -40,6 +48,52 @@ type RuleCondition struct { Value Float64 `json:"value"` } +func (s *RuleCondition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "applies_to": + if err := dec.Decode(&s.AppliesTo); err != nil { + return err + } + + case "operator": + if err := dec.Decode(&s.Operator); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Value = f + case float64: + f := Float64(v) + s.Value = f + } + + } + } + return nil +} + // NewRuleCondition returns a RuleCondition. func NewRuleCondition() *RuleCondition { r := &RuleCondition{} diff --git a/typedapi/types/runningstatesearchinterval.go b/typedapi/types/runningstatesearchinterval.go old mode 100755 new mode 100644 index 78e1864682..c38c0e40ee --- a/typedapi/types/runningstatesearchinterval.go +++ b/typedapi/types/runningstatesearchinterval.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RunningStateSearchInterval type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Datafeed.ts#L164-L169 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Datafeed.ts#L164-L169 type RunningStateSearchInterval struct { End Duration `json:"end,omitempty"` EndMs int64 `json:"end_ms"` @@ -30,6 +38,46 @@ type RunningStateSearchInterval struct { StartMs int64 `json:"start_ms"` } +func (s *RunningStateSearchInterval) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "end": + if err := dec.Decode(&s.End); err != nil { + return err + } + + case "end_ms": + if err := dec.Decode(&s.EndMs); err != nil { + return err + } + + case "start": + if err := dec.Decode(&s.Start); err != nil { + return err + } + + case "start_ms": + if err := dec.Decode(&s.StartMs); err != nil { + return err + } + + } + } + return nil +} + // NewRunningStateSearchInterval returns a RunningStateSearchInterval. 
func NewRunningStateSearchInterval() *RunningStateSearchInterval { r := &RunningStateSearchInterval{} diff --git a/typedapi/types/runtimefield.go b/typedapi/types/runtimefield.go old mode 100755 new mode 100644 index 66f990bac7..576abfb072 --- a/typedapi/types/runtimefield.go +++ b/typedapi/types/runtimefield.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/runtimefieldtype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // RuntimeField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/RuntimeFields.ts#L26-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/RuntimeFields.ts#L26-L38 type RuntimeField struct { // FetchFields For type `lookup` FetchFields []RuntimeFieldFetchFields `json:"fetch_fields,omitempty"` @@ -41,6 +47,64 @@ type RuntimeField struct { Type runtimefieldtype.RuntimeFieldType `json:"type"` } +func (s *RuntimeField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fetch_fields": + if err := dec.Decode(&s.FetchFields); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "input_field": + if err := dec.Decode(&s.InputField); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + case "target_index": + if err := dec.Decode(&s.TargetIndex); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewRuntimeField returns a RuntimeField. func NewRuntimeField() *RuntimeField { r := &RuntimeField{} diff --git a/typedapi/types/runtimefieldfetchfields.go b/typedapi/types/runtimefieldfetchfields.go old mode 100755 new mode 100644 index 7659306702..2e5924fa52 --- a/typedapi/types/runtimefieldfetchfields.go +++ b/typedapi/types/runtimefieldfetchfields.go @@ -16,18 +16,64 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // RuntimeFieldFetchFields type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/RuntimeFields.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/RuntimeFields.ts#L40-L44 type RuntimeFieldFetchFields struct { Field string `json:"field"` Format *string `json:"format,omitempty"` } +func (s *RuntimeFieldFetchFields) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Field) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + } + } + return nil +} + // NewRuntimeFieldFetchFields returns a RuntimeFieldFetchFields. func NewRuntimeFieldFetchFields() *RuntimeFieldFetchFields { r := &RuntimeFieldFetchFields{} diff --git a/typedapi/types/runtimefields.go b/typedapi/types/runtimefields.go old mode 100755 new mode 100644 index c9609b0ff4..08f20a6e84 --- a/typedapi/types/runtimefields.go +++ b/typedapi/types/runtimefields.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // RuntimeFields type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/RuntimeFields.ts#L24-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/RuntimeFields.ts#L24-L24 type RuntimeFields map[string]RuntimeField diff --git a/typedapi/types/runtimefieldstype.go b/typedapi/types/runtimefieldstype.go old mode 100755 new mode 100644 index 81adb125f1..24cb8c052d --- a/typedapi/types/runtimefieldstype.go +++ b/typedapi/types/runtimefieldstype.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // RuntimeFieldsType type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L273-L288 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L273-L288 type RuntimeFieldsType struct { CharsMax int64 `json:"chars_max"` CharsTotal int64 `json:"chars_total"` @@ -40,6 +50,216 @@ type RuntimeFieldsType struct { SourceTotal int64 `json:"source_total"` } +func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "chars_max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CharsMax = value + case float64: + f := int64(v) + s.CharsMax = f + } + + case "chars_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CharsTotal = value + case float64: + f := int64(v) + s.CharsTotal = f + } + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "doc_max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocMax = value + case float64: + f := int64(v) + s.DocMax = f + } + + case "doc_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocTotal = value + case float64: + f := int64(v) + s.DocTotal = f + } + + case "index_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexCount = value + case float64: + f := int64(v) + s.IndexCount = f + } + + case "lang": + if err := dec.Decode(&s.Lang); err != nil { + return err + } + + case "lines_max": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LinesMax = value + case float64: + f := int64(v) + s.LinesMax = f + } + + case "lines_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LinesTotal = value + case float64: + f := int64(v) + s.LinesTotal = f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "scriptless_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ScriptlessCount = value + case float64: + f := int64(v) + s.ScriptlessCount = f + } + + case "shadowed_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShadowedCount = value + case float64: + f := int64(v) + s.ShadowedCount = f + } + + case "source_max": + var tmp 
interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SourceMax = value + case float64: + f := int64(v) + s.SourceMax = f + } + + case "source_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SourceTotal = value + case float64: + f := int64(v) + s.SourceTotal = f + } + + } + } + return nil +} + // NewRuntimeFieldsType returns a RuntimeFieldsType. func NewRuntimeFieldsType() *RuntimeFieldsType { r := &RuntimeFieldsType{} diff --git a/typedapi/types/samplediversity.go b/typedapi/types/samplediversity.go old mode 100755 new mode 100644 index b173d2bac3..609db601fb --- a/typedapi/types/samplediversity.go +++ b/typedapi/types/samplediversity.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SampleDiversity type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/ExploreControls.ts#L31-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/ExploreControls.ts#L31-L34 type SampleDiversity struct { Field string `json:"field"` MaxDocsPerValue int `json:"max_docs_per_value"` } +func (s *SampleDiversity) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "max_docs_per_value": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDocsPerValue = value + case float64: + f := int(v) + s.MaxDocsPerValue = f + } + + } + } + return nil +} + // NewSampleDiversity returns a SampleDiversity. func NewSampleDiversity() *SampleDiversity { r := &SampleDiversity{} diff --git a/typedapi/types/sampleraggregate.go b/typedapi/types/sampleraggregate.go old mode 100755 new mode 100644 index 57f239a3bb..c97f10979d --- a/typedapi/types/sampleraggregate.go +++ b/typedapi/types/sampleraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // SamplerAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L497-L498 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L498-L499 type SamplerAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err 
:= dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := 
NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s SamplerAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/sampleraggregation.go b/typedapi/types/sampleraggregation.go old mode 100755 new mode 100644 index dc6097eced..2ec269a84e --- a/typedapi/types/sampleraggregation.go +++ b/typedapi/types/sampleraggregation.go @@ -16,21 +16,76 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // SamplerAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L318-L320 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L318-L320 type SamplerAggregation struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - ShardSize *int `json:"shard_size,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + ShardSize *int `json:"shard_size,omitempty"` +} + +func (s *SamplerAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + } + } + return nil } // NewSamplerAggregation returns a SamplerAggregation. 
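Reviewer note (illustrative, not part of the generated sources): the regenerated SamplerAggregate decoder above handles Elasticsearch's typed_keys response format by splitting keys such as `sterms#genres` on `#`, using the prefix to pick the concrete aggregate type and storing the value under the bare sub-aggregation name; unknown prefixes fall through to the `default` branch as a plain `map[string]interface{}`. A minimal sketch of that behavior, assuming the v8 typedapi import path used throughout this diff and a hypothetical response body; the expected outputs in the comments follow from reading the generated decoder, not from a recorded run:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A typed_keys sampler aggregation body: the "sterms#" prefix names the
	// concrete sub-aggregation type, "genres" is the user-supplied name.
	raw := []byte(`{
		"doc_count": 1000,
		"sterms#genres": {"buckets": [{"key": "rock", "doc_count": 10}]}
	}`)

	agg := types.NewSamplerAggregate()
	if err := json.Unmarshal(raw, agg); err != nil {
		panic(err)
	}

	fmt.Println(agg.DocCount)                      // 1000
	fmt.Printf("%T\n", agg.Aggregations["genres"]) // *types.StringTermsAggregate
}
```
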
diff --git a/typedapi/types/scaledfloatnumberproperty.go b/typedapi/types/scaledfloatnumberproperty.go old mode 100755 new mode 100644 index e7b6237e0f..4b4ef14401 --- a/typedapi/types/scaledfloatnumberproperty.go +++ b/typedapi/types/scaledfloatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ScaledFloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L171-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L171-L175 type ScaledFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -64,6 +66,7 @@ type ScaledFloatNumberProperty struct { } func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -78,23 +81,63 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -103,6 +146,9 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -390,35 +436,78 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + 
case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.NullValue = &f + case float64: + f := Float64(v) + s.NullValue = &f } case "on_script_error": @@ -427,6 +516,9 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -714,15 +806,26 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "scaling_factor": - if err := dec.Decode(&s.ScalingFactor); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ScalingFactor = &f + case float64: + f := Float64(v) + s.ScalingFactor = &f } case "script": @@ -731,18 +834,39 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/schedulecontainer.go b/typedapi/types/schedulecontainer.go old mode 100755 new mode 100644 index 585173575b..cd1a53fb79 --- a/typedapi/types/schedulecontainer.go +++ b/typedapi/types/schedulecontainer.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ScheduleContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L85-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L85-L96 type ScheduleContainer struct { Cron *string `json:"cron,omitempty"` Daily *DailySchedule `json:"daily,omitempty"` @@ -33,6 +41,94 @@ type ScheduleContainer struct { Yearly []TimeOfYear `json:"yearly,omitempty"` } +func (s *ScheduleContainer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cron": + if err := dec.Decode(&s.Cron); err != nil { + return err + } + + case "daily": + if err := dec.Decode(&s.Daily); err != nil { + return err + } + + case "hourly": + if err := dec.Decode(&s.Hourly); err != nil { + return err + } + + case "interval": + if err := dec.Decode(&s.Interval); err != nil { + return err + } + + case "monthly": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewTimeOfMonth() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Monthly = append(s.Monthly, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Monthly); err != nil { + return err + } + } + + case "weekly": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewTimeOfWeek() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Weekly = append(s.Weekly, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Weekly); err != nil { + return err + } + } + + case "yearly": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewTimeOfYear() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Yearly = append(s.Yearly, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Yearly); err != nil { + return err + } + } + + } + } + return nil +} + // NewScheduleContainer returns a ScheduleContainer. func NewScheduleContainer() *ScheduleContainer { r := &ScheduleContainer{} diff --git a/typedapi/types/scheduletimeofday.go b/typedapi/types/scheduletimeofday.go old mode 100755 new mode 100644 index 9171ecb134..28e81e0ebb --- a/typedapi/types/scheduletimeofday.go +++ b/typedapi/types/scheduletimeofday.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
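Reviewer note (illustrative only): the new ScheduleContainer decoder accepts `monthly`, `weekly`, and `yearly` either as a single object or as an array, sniffing for a leading `[` and normalizing both shapes into a slice. A hedged sketch of that behavior, assuming the watcher schedule field names shown in this hunk and made-up schedule values; the length checks reflect how the two branches append into the same field:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Watcher allows a bare object here; the decoder wraps it into a one-element slice.
	single := []byte(`{"weekly": {"on": ["monday"], "at": ["noon"]}}`)
	sched := types.NewScheduleContainer()
	if err := json.Unmarshal(single, sched); err != nil {
		panic(err)
	}
	fmt.Println(len(sched.Weekly)) // 1

	// An array decodes through the other branch into the same slice.
	many := []byte(`{"weekly": [{"on": ["monday"]}, {"on": ["friday"]}]}`)
	sched2 := types.NewScheduleContainer()
	if err := json.Unmarshal(many, sched2); err != nil {
		panic(err)
	}
	fmt.Println(len(sched2.Weekly)) // 2
}
```
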
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // HourAndMinute // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L103-L108 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L103-L108 type ScheduleTimeOfDay interface{} diff --git a/typedapi/types/scheduletriggerevent.go b/typedapi/types/scheduletriggerevent.go old mode 100755 new mode 100644 index 1ee9475aed..181e3d3a9c --- a/typedapi/types/scheduletriggerevent.go +++ b/typedapi/types/scheduletriggerevent.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ScheduleTriggerEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L98-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L98-L101 type ScheduleTriggerEvent struct { ScheduledTime DateTime `json:"scheduled_time"` TriggeredTime DateTime `json:"triggered_time,omitempty"` } +func (s *ScheduleTriggerEvent) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "scheduled_time": + if err := dec.Decode(&s.ScheduledTime); err != nil { + return err + } + + case "triggered_time": + if err := dec.Decode(&s.TriggeredTime); err != nil { + return err + } + + } + } + return nil +} + // NewScheduleTriggerEvent returns a ScheduleTriggerEvent. func NewScheduleTriggerEvent() *ScheduleTriggerEvent { r := &ScheduleTriggerEvent{} diff --git a/typedapi/types/scoresort.go b/typedapi/types/scoresort.go old mode 100755 new mode 100644 index e298008cea..075d2a2c78 --- a/typedapi/types/scoresort.go +++ b/typedapi/types/scoresort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // ScoreSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L55-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L55-L57 type ScoreSort struct { Order *sortorder.SortOrder `json:"order,omitempty"` } diff --git a/typedapi/types/script.go b/typedapi/types/script.go old mode 100755 new mode 100644 index 42f653997b..d2bc973610 --- a/typedapi/types/script.go +++ b/typedapi/types/script.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // InlineScript // StoredScriptId // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Scripting.ts#L56-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Scripting.ts#L56-L57 type Script interface{} diff --git a/typedapi/types/scriptcache.go b/typedapi/types/scriptcache.go old mode 100755 new mode 100644 index 77ef48bcb1..eef0ff485d --- a/typedapi/types/scriptcache.go +++ b/typedapi/types/scriptcache.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ScriptCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L413-L418 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L413-L418 type ScriptCache struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` @@ -30,6 +40,79 @@ type ScriptCache struct { Context *string `json:"context,omitempty"` } +func (s *ScriptCache) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CacheEvictions = &value + case float64: + f := int64(v) + s.CacheEvictions = &f + } + + case "compilation_limit_triggered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CompilationLimitTriggered = &value + case float64: + f := int64(v) + s.CompilationLimitTriggered = &f + } + + case "compilations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Compilations = &value + case float64: + f := int64(v) + s.Compilations = &f + } + + case "context": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Context = &o + + } + } + return nil +} + // NewScriptCache returns a ScriptCache. func NewScriptCache() *ScriptCache { r := &ScriptCache{} diff --git a/typedapi/types/scriptcondition.go b/typedapi/types/scriptcondition.go old mode 100755 new mode 100644 index 2958c445a3..d84f6e331c --- a/typedapi/types/scriptcondition.go +++ b/typedapi/types/scriptcondition.go @@ -16,7 +16,7 @@ // under the License. 
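Reviewer note (illustrative only): ScriptCache, like the other stats types regenerated in this diff, now tolerates numeric fields that arrive as quoted strings, falling back to strconv parsing instead of returning an unmarshalling error. A small sketch under that assumption, with an invented stats payload:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "cache_evictions" arrives as a quoted string; the generated decoder
	// parses it with strconv.ParseInt rather than failing on the type mismatch.
	raw := []byte(`{"cache_evictions": "12", "compilations": 34}`)

	sc := types.NewScriptCache()
	if err := json.Unmarshal(raw, sc); err != nil {
		panic(err)
	}
	fmt.Println(*sc.CacheEvictions, *sc.Compilations) // 12 34
}
```
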
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // ScriptCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L76-L84 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L76-L84 type ScriptCondition struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` diff --git a/typedapi/types/scriptedheuristic.go b/typedapi/types/scriptedheuristic.go old mode 100755 new mode 100644 index c57fe9be0d..6673a2bf8a --- a/typedapi/types/scriptedheuristic.go +++ b/typedapi/types/scriptedheuristic.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ScriptedHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L338-L340 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L338-L340 type ScriptedHeuristic struct { Script Script `json:"script"` } +func (s *ScriptedHeuristic) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewScriptedHeuristic returns a ScriptedHeuristic. func NewScriptedHeuristic() *ScriptedHeuristic { r := &ScriptedHeuristic{} diff --git a/typedapi/types/scriptedmetricaggregate.go b/typedapi/types/scriptedmetricaggregate.go old mode 100755 new mode 100644 index f5540c0397..c5c1d5a86d --- a/typedapi/types/scriptedmetricaggregate.go +++ b/typedapi/types/scriptedmetricaggregate.go @@ -16,20 +16,54 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ScriptedMetricAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L640-L643 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L649-L652 type ScriptedMetricAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Value json.RawMessage `json:"value,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Value json.RawMessage `json:"value,omitempty"` +} + +func (s *ScriptedMetricAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + } + } + return nil } // NewScriptedMetricAggregate returns a ScriptedMetricAggregate. diff --git a/typedapi/types/scriptedmetricaggregation.go b/typedapi/types/scriptedmetricaggregation.go old mode 100755 new mode 100644 index 659b4af769..9dba905818 --- a/typedapi/types/scriptedmetricaggregation.go +++ b/typedapi/types/scriptedmetricaggregation.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ScriptedMetricAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L137-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L137-L143 type ScriptedMetricAggregation struct { CombineScript Script `json:"combine_script,omitempty"` Field *string `json:"field,omitempty"` @@ -38,6 +42,69 @@ type ScriptedMetricAggregation struct { Script Script `json:"script,omitempty"` } +func (s *ScriptedMetricAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "combine_script": + if err := dec.Decode(&s.CombineScript); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "init_script": + if err := dec.Decode(&s.InitScript); err != nil { + return err + } + + case "map_script": + if err := dec.Decode(&s.MapScript); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "reduce_script": + if err := dec.Decode(&s.ReduceScript); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewScriptedMetricAggregation returns a ScriptedMetricAggregation. 
func NewScriptedMetricAggregation() *ScriptedMetricAggregation { r := &ScriptedMetricAggregation{ diff --git a/typedapi/types/scriptfield.go b/typedapi/types/scriptfield.go old mode 100755 new mode 100644 index 69ca0d6b83..61204b163c --- a/typedapi/types/scriptfield.go +++ b/typedapi/types/scriptfield.go @@ -16,18 +16,67 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ScriptField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Scripting.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Scripting.ts#L59-L62 type ScriptField struct { IgnoreFailure *bool `json:"ignore_failure,omitempty"` Script Script `json:"script"` } +func (s *ScriptField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewScriptField returns a ScriptField. func NewScriptField() *ScriptField { r := &ScriptField{} diff --git a/typedapi/types/scripting.go b/typedapi/types/scripting.go old mode 100755 new mode 100644 index ff61588a35..89676e12fd --- a/typedapi/types/scripting.go +++ b/typedapi/types/scripting.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Scripting type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L389-L395 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L389-L395 type Scripting struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` @@ -31,6 +41,84 @@ type Scripting struct { Contexts []NodesContext `json:"contexts,omitempty"` } +func (s *Scripting) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CacheEvictions = &value + case float64: + f := int64(v) + s.CacheEvictions = &f + } + + case "compilation_limit_triggered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CompilationLimitTriggered = &value + case float64: + f := int64(v) + s.CompilationLimitTriggered = &f + } + + case "compilations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Compilations = &value + case float64: + f := int64(v) + s.Compilations = &f + } + + case "compilations_history": + if s.CompilationsHistory == nil { + s.CompilationsHistory = make(map[string]int64, 0) + } + if err := dec.Decode(&s.CompilationsHistory); err != nil { + return err + } + + case "contexts": + if err := dec.Decode(&s.Contexts); err != nil { + return err + } + + } + } + return nil +} + // NewScripting returns a Scripting. func NewScripting() *Scripting { r := &Scripting{ diff --git a/typedapi/types/scriptquery.go b/typedapi/types/scriptquery.go old mode 100755 new mode 100644 index 12790a5023..90e7e1f7f8 --- a/typedapi/types/scriptquery.go +++ b/typedapi/types/scriptquery.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ScriptQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L164-L166 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L164-L166 type ScriptQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Script Script `json:"script"` } +func (s *ScriptQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewScriptQuery returns a ScriptQuery. func NewScriptQuery() *ScriptQuery { r := &ScriptQuery{} diff --git a/typedapi/types/scriptscorefunction.go b/typedapi/types/scriptscorefunction.go old mode 100755 new mode 100644 index 9897889673..339964ff73 --- a/typedapi/types/scriptscorefunction.go +++ b/typedapi/types/scriptscorefunction.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ScriptScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/compound.ts#L61-L63 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/compound.ts#L61-L63 type ScriptScoreFunction struct { Script Script `json:"script"` } +func (s *ScriptScoreFunction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewScriptScoreFunction returns a ScriptScoreFunction. func NewScriptScoreFunction() *ScriptScoreFunction { r := &ScriptScoreFunction{} diff --git a/typedapi/types/scriptscorequery.go b/typedapi/types/scriptscorequery.go old mode 100755 new mode 100644 index 9869a66024..8652ca3282 --- a/typedapi/types/scriptscorequery.go +++ b/typedapi/types/scriptscorequery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ScriptScoreQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L168-L172 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L168-L172 type ScriptScoreQuery struct { Boost *float32 `json:"boost,omitempty"` MinScore *float32 `json:"min_score,omitempty"` @@ -31,6 +41,76 @@ type ScriptScoreQuery struct { Script Script `json:"script"` } +func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "min_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.MinScore = &f + case float64: + f := float32(v) + s.MinScore = &f + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewScriptScoreQuery returns a ScriptScoreQuery. func NewScriptScoreQuery() *ScriptScoreQuery { r := &ScriptScoreQuery{} diff --git a/typedapi/types/scriptsort.go b/typedapi/types/scriptsort.go old mode 100755 new mode 100644 index 4cac1c8a8f..97e384fb68 --- a/typedapi/types/scriptsort.go +++ b/typedapi/types/scriptsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,11 +24,17 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/scriptsorttype" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortorder" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ScriptSort type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L68-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L68-L74 type ScriptSort struct { Mode *sortmode.SortMode `json:"mode,omitempty"` Nested *NestedSortValue `json:"nested,omitempty"` @@ -37,6 +43,51 @@ type ScriptSort struct { Type *scriptsorttype.ScriptSortType `json:"type,omitempty"` } +func (s *ScriptSort) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + case "nested": + if err := dec.Decode(&s.Nested); err != nil { + return err + } + + case "order": + if err := dec.Decode(&s.Order); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewScriptSort returns a ScriptSort. func NewScriptSort() *ScriptSort { r := &ScriptSort{} diff --git a/typedapi/types/scripttransform.go b/typedapi/types/scripttransform.go old mode 100755 new mode 100644 index ac60b78765..8cbc45a012 --- a/typedapi/types/scripttransform.go +++ b/typedapi/types/scripttransform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // ScriptTransform type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Transform.ts#L36-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Transform.ts#L36-L44 type ScriptTransform struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` diff --git a/typedapi/types/scrollids.go b/typedapi/types/scrollids.go old mode 100755 new mode 100644 index a6d99f6958..c3e0a917c1 --- a/typedapi/types/scrollids.go +++ b/typedapi/types/scrollids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ScrollIds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L50-L50 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L50-L50 type ScrollIds []string diff --git a/typedapi/types/searchablesnapshots.go b/typedapi/types/searchablesnapshots.go old mode 100755 new mode 100644 index b960f8306a..b419d302f2 --- a/typedapi/types/searchablesnapshots.go +++ b/typedapi/types/searchablesnapshots.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SearchableSnapshots type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L419-L423 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L419-L423 type SearchableSnapshots struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -31,6 +41,102 @@ type SearchableSnapshots struct { SharedCacheIndicesCount *int `json:"shared_cache_indices_count,omitempty"` } +func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "full_copy_indices_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FullCopyIndicesCount = &value + case float64: + f := int(v) + s.FullCopyIndicesCount = &f + } + + case "indices_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IndicesCount = value + case float64: + f := int(v) + s.IndicesCount = f + } + + case "shared_cache_indices_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SharedCacheIndicesCount = &value + case float64: + f := int(v) + s.SharedCacheIndicesCount = &f + } + + } + } + return nil +} + // NewSearchableSnapshots returns a SearchableSnapshots. func NewSearchableSnapshots() *SearchableSnapshots { r := &SearchableSnapshots{} diff --git a/typedapi/types/searchapplication.go b/typedapi/types/searchapplication.go new file mode 100644 index 0000000000..e74c9df384 --- /dev/null +++ b/typedapi/types/searchapplication.go @@ -0,0 +1,97 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + +// SearchApplication type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/_types/SearchApplication.ts#L24-L45 +type SearchApplication struct { + // AnalyticsCollectionName Analytics collection associated to the Search Application + AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` + // Indices Indices that are part of the Search Application + Indices []string `json:"indices"` + // Name Search Application name + Name string `json:"name"` + // Template Search template to use on search operations + Template *SearchApplicationTemplate `json:"template,omitempty"` + // UpdatedAtMillis Last time the Search Application was updated + UpdatedAtMillis int64 `json:"updated_at_millis"` +} + +func (s *SearchApplication) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analytics_collection_name": + if err := dec.Decode(&s.AnalyticsCollectionName); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + case "updated_at_millis": + if err := dec.Decode(&s.UpdatedAtMillis); err != nil { + return err + } + + } + } + return nil +} + +// NewSearchApplication returns a SearchApplication. +func NewSearchApplication() *SearchApplication { + r := &SearchApplication{} + + return r +} diff --git a/typedapi/types/searchapplicationlistitem.go b/typedapi/types/searchapplicationlistitem.go new file mode 100644 index 0000000000..07336b7a18 --- /dev/null +++ b/typedapi/types/searchapplicationlistitem.go @@ -0,0 +1,90 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + +// SearchApplicationListItem type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/list/SearchApplicationsListResponse.ts#L31-L48 +type SearchApplicationListItem struct { + // AnalyticsCollectionName Analytics collection associated to the Search Application + AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` + // Indices Indices that are part of the Search Application + Indices []string `json:"indices"` + // Name Search Application name + Name string `json:"name"` + // UpdatedAtMillis Last time the Search Application was updated + UpdatedAtMillis int64 `json:"updated_at_millis"` +} + +func (s *SearchApplicationListItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analytics_collection_name": + if err := dec.Decode(&s.AnalyticsCollectionName); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "updated_at_millis": + if err := dec.Decode(&s.UpdatedAtMillis); err != nil { + return err + } + + } + } + return nil +} + +// NewSearchApplicationListItem returns a SearchApplicationListItem. +func NewSearchApplicationListItem() *SearchApplicationListItem { + r := &SearchApplicationListItem{} + + return r +} diff --git a/typedapi/types/searchapplicationtemplate.go b/typedapi/types/searchapplicationtemplate.go new file mode 100644 index 0000000000..4a1bd131be --- /dev/null +++ b/typedapi/types/searchapplicationtemplate.go @@ -0,0 +1,35 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +// SearchApplicationTemplate type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/search_application/_types/SearchApplication.ts#L47-L49 +type SearchApplicationTemplate struct { + Script InlineScript `json:"script"` +} + +// NewSearchApplicationTemplate returns a SearchApplicationTemplate. +func NewSearchApplicationTemplate() *SearchApplicationTemplate { + r := &SearchApplicationTemplate{} + + return r +} diff --git a/typedapi/types/searchasyoutypeproperty.go b/typedapi/types/searchasyoutypeproperty.go old mode 100755 new mode 100644 index ca126d8143..2a7bb1f38c --- a/typedapi/types/searchasyoutypeproperty.go +++ b/typedapi/types/searchasyoutypeproperty.go @@ -16,7 +16,7 @@ // under the License. 
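Note on the new search application types above: SearchApplication and SearchApplicationListItem carry their own UnmarshalJSON, so a Search Application response body can be decoded with plain encoding/json. A minimal sketch; the payload, field values, and the main wrapper are invented for illustration, and it assumes the typedapi/types package from this change is available on the module path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Invented payload shaped like a search_application.get response body.
	payload := []byte(`{
		"name": "website-search",
		"indices": ["website-products"],
		"analytics_collection_name": "website-analytics",
		"updated_at_millis": 1682000000000
	}`)

	var app types.SearchApplication
	if err := json.Unmarshal(payload, &app); err != nil {
		panic(err)
	}

	// Optional fields such as analytics_collection_name are pointers.
	if app.AnalyticsCollectionName != nil {
		fmt.Println(app.Name, app.Indices, *app.AnalyticsCollectionName)
	}
}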
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SearchAsYouTypeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L190-L200 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L190-L200 type SearchAsYouTypeProperty struct { Analyzer *string `json:"analyzer,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -57,6 +59,7 @@ type SearchAsYouTypeProperty struct { } func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -71,13 +74,27 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { switch t { case "analyzer": - if err := dec.Decode(&s.Analyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Analyzer = &o case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "dynamic": @@ -86,6 +103,9 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -373,20 +393,40 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "index_options": @@ -395,21 +435,47 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { } case "max_shingle_size": - if err := dec.Decode(&s.MaxShingleSize); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxShingleSize = &value + case float64: + f := int(v) + s.MaxShingleSize = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "norms": - if err := dec.Decode(&s.Norms); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + 
switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Norms = &value + case bool: + s.Norms = &v } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -697,30 +763,48 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "search_analyzer": - if err := dec.Decode(&s.SearchAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchAnalyzer = &o case "search_quote_analyzer": - if err := dec.Decode(&s.SearchQuoteAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchQuoteAnalyzer = &o case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "term_vector": diff --git a/typedapi/types/searchidle.go b/typedapi/types/searchidle.go old mode 100755 new mode 100644 index 70ee29b036..e6435971a7 --- a/typedapi/types/searchidle.go +++ b/typedapi/types/searchidle.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SearchIdle type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L236-L239 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L236-L239 type SearchIdle struct { After Duration `json:"after,omitempty"` } +func (s *SearchIdle) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "after": + if err := dec.Decode(&s.After); err != nil { + return err + } + + } + } + return nil +} + // NewSearchIdle returns a SearchIdle. func NewSearchIdle() *SearchIdle { r := &SearchIdle{} diff --git a/typedapi/types/searchinput.go b/typedapi/types/searchinput.go old mode 100755 new mode 100644 index 3119c007e1..4811850e81 --- a/typedapi/types/searchinput.go +++ b/typedapi/types/searchinput.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
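The SearchAsYouTypeProperty changes above replace direct decoding with more tolerant handling: copy_to now accepts either a single string or an array, and analyzer-like fields are captured from the raw message. A standalone sketch of the string-or-array normalization; the helper name is illustrative and not part of the generated API:

package example

import (
	"bytes"
	"encoding/json"
)

// decodeStringOrSlice mirrors the pattern used for fields such as copy_to:
// a bare JSON string is wrapped into a one-element slice, while a JSON
// array is decoded as-is.
func decodeStringOrSlice(raw json.RawMessage) ([]string, error) {
	if !bytes.HasPrefix(raw, []byte("[")) {
		var single string
		if err := json.Unmarshal(raw, &single); err != nil {
			return nil, err
		}
		return []string{single}, nil
	}
	var many []string
	if err := json.Unmarshal(raw, &many); err != nil {
		return nil, err
	}
	return many, nil
}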
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SearchInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L112-L116 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L112-L116 type SearchInput struct { Extract []string `json:"extract,omitempty"` Request SearchInputRequestDefinition `json:"request"` Timeout Duration `json:"timeout,omitempty"` } +func (s *SearchInput) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "extract": + if err := dec.Decode(&s.Extract); err != nil { + return err + } + + case "request": + if err := dec.Decode(&s.Request); err != nil { + return err + } + + case "timeout": + if err := dec.Decode(&s.Timeout); err != nil { + return err + } + + } + } + return nil +} + // NewSearchInput returns a SearchInput. func NewSearchInput() *SearchInput { r := &SearchInput{} diff --git a/typedapi/types/searchinputrequestbody.go b/typedapi/types/searchinputrequestbody.go old mode 100755 new mode 100644 index 8a53f5f5a3..f9e4b3f974 --- a/typedapi/types/searchinputrequestbody.go +++ b/typedapi/types/searchinputrequestbody.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SearchInputRequestBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L147-L149 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L147-L149 type SearchInputRequestBody struct { Query Query `json:"query"` } diff --git a/typedapi/types/searchinputrequestdefinition.go b/typedapi/types/searchinputrequestdefinition.go old mode 100755 new mode 100644 index d1a90458ff..4b879e6cd1 --- a/typedapi/types/searchinputrequestdefinition.go +++ b/typedapi/types/searchinputrequestdefinition.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/searchtype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SearchInputRequestDefinition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L118-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L118-L125 type SearchInputRequestDefinition struct { Body *SearchInputRequestBody `json:"body,omitempty"` Indices []string `json:"indices,omitempty"` @@ -36,6 +44,65 @@ type SearchInputRequestDefinition struct { Template *SearchTemplateRequestBody `json:"template,omitempty"` } +func (s *SearchInputRequestDefinition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "body": + if err := dec.Decode(&s.Body); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "indices_options": + if err := dec.Decode(&s.IndicesOptions); err != nil { + return err + } + + case "rest_total_hits_as_int": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.RestTotalHitsAsInt = &value + case bool: + s.RestTotalHitsAsInt = &v + } + + case "search_type": + if err := dec.Decode(&s.SearchType); err != nil { + return err + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return err + } + + } + } + return nil +} + // NewSearchInputRequestDefinition returns a SearchInputRequestDefinition. func NewSearchInputRequestDefinition() *SearchInputRequestDefinition { r := &SearchInputRequestDefinition{} diff --git a/typedapi/types/searchprofile.go b/typedapi/types/searchprofile.go old mode 100755 new mode 100644 index ad7d8bddf9..ecc2e46ba2 --- a/typedapi/types/searchprofile.go +++ b/typedapi/types/searchprofile.go @@ -16,19 +16,74 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SearchProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L126-L130 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L126-L130 type SearchProfile struct { Collector []Collector `json:"collector"` Query []QueryProfile `json:"query"` RewriteTime int64 `json:"rewrite_time"` } +func (s *SearchProfile) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collector": + if err := dec.Decode(&s.Collector); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "rewrite_time": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RewriteTime = value + case float64: + f := int64(v) + s.RewriteTime = f + } + + } + } + return nil +} + // NewSearchProfile returns a SearchProfile. func NewSearchProfile() *SearchProfile { r := &SearchProfile{} diff --git a/typedapi/types/searchstats.go b/typedapi/types/searchstats.go old mode 100755 new mode 100644 index 4b93de53ba..1420202100 --- a/typedapi/types/searchstats.go +++ b/typedapi/types/searchstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SearchStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L185-L204 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L185-L204 type SearchStats struct { FetchCurrent int64 `json:"fetch_current"` FetchTime Duration `json:"fetch_time,omitempty"` @@ -44,6 +54,209 @@ type SearchStats struct { SuggestTotal int64 `json:"suggest_total"` } +func (s *SearchStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fetch_current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FetchCurrent = value + case float64: + f := int64(v) + s.FetchCurrent = f + } + + case "fetch_time": + if err := dec.Decode(&s.FetchTime); err != nil { + return err + } + + case "fetch_time_in_millis": + if err := dec.Decode(&s.FetchTimeInMillis); err != nil { + return err + } + + case "fetch_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FetchTotal = value + case float64: + f := int64(v) + s.FetchTotal = f + } + + case "groups": + if s.Groups == nil { + s.Groups = make(map[string]SearchStats, 0) + } + if err := dec.Decode(&s.Groups); err != nil { + return err + } + + case "open_contexts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.OpenContexts = &value + case float64: + f := int64(v) + s.OpenContexts = &f + } + + case "query_current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.QueryCurrent = value + case float64: + f := int64(v) + s.QueryCurrent = f + } + + case "query_time": + if err := dec.Decode(&s.QueryTime); err != nil { + return err + } + + case "query_time_in_millis": + if err := dec.Decode(&s.QueryTimeInMillis); err != nil { + return err + } + + case "query_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.QueryTotal = value + case float64: + f := int64(v) + s.QueryTotal = f + } + + case "scroll_current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ScrollCurrent = value + case float64: + f := int64(v) + s.ScrollCurrent = f + } + + case "scroll_time": + if err := dec.Decode(&s.ScrollTime); err != nil { + return err + } + + case "scroll_time_in_millis": + if err := dec.Decode(&s.ScrollTimeInMillis); err != nil { + return err + } + + case "scroll_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ScrollTotal = value + case float64: + f := int64(v) + s.ScrollTotal = f + } + + case "suggest_current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + 
return err + } + s.SuggestCurrent = value + case float64: + f := int64(v) + s.SuggestCurrent = f + } + + case "suggest_time": + if err := dec.Decode(&s.SuggestTime); err != nil { + return err + } + + case "suggest_time_in_millis": + if err := dec.Decode(&s.SuggestTimeInMillis); err != nil { + return err + } + + case "suggest_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SuggestTotal = value + case float64: + f := int64(v) + s.SuggestTotal = f + } + + } + } + return nil +} + // NewSearchStats returns a SearchStats. func NewSearchStats() *SearchStats { r := &SearchStats{ diff --git a/typedapi/types/searchtemplaterequestbody.go b/typedapi/types/searchtemplaterequestbody.go old mode 100755 new mode 100644 index 57400cdcae..7e1a9cf775 --- a/typedapi/types/searchtemplaterequestbody.go +++ b/typedapi/types/searchtemplaterequestbody.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // SearchTemplateRequestBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L128-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L128-L145 type SearchTemplateRequestBody struct { Explain *bool `json:"explain,omitempty"` // Id ID of the search template to use. If no source is specified, @@ -40,6 +46,75 @@ type SearchTemplateRequestBody struct { Source *string `json:"source,omitempty"` } +func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "explain": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Explain = &value + case bool: + s.Explain = &v + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "profile": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Profile = &value + case bool: + s.Profile = &v + } + + case "source": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Source = &o + + } + } + return nil +} + // NewSearchTemplateRequestBody returns a SearchTemplateRequestBody. func NewSearchTemplateRequestBody() *SearchTemplateRequestBody { r := &SearchTemplateRequestBody{ diff --git a/typedapi/types/searchtransform.go b/typedapi/types/searchtransform.go old mode 100755 new mode 100644 index 8705667b86..a149fdaa8f --- a/typedapi/types/searchtransform.go +++ b/typedapi/types/searchtransform.go @@ -16,18 +16,56 @@ // under the License. 
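The SearchProfile and SearchStats decoders above accept counters either as JSON numbers or as quoted strings. A condensed sketch of that pattern, pulled out into a helper purely for illustration (the helper itself is not part of the generated code):

package example

import (
	"encoding/json"
	"strconv"
)

// decodeInt64 accepts both 123 and "123", matching how fields such as
// rewrite_time or query_total are handled in the generated UnmarshalJSON.
func decodeInt64(raw json.RawMessage) (int64, error) {
	var tmp interface{}
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64:
		return int64(v), nil
	default:
		// Other JSON types are ignored and leave the field zero-valued,
		// which is also what the generated code does.
		return 0, nil
	}
}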
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SearchTransform type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Transform.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Transform.ts#L46-L49 type SearchTransform struct { Request SearchInputRequestDefinition `json:"request"` Timeout Duration `json:"timeout"` } +func (s *SearchTransform) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "request": + if err := dec.Decode(&s.Request); err != nil { + return err + } + + case "timeout": + if err := dec.Decode(&s.Timeout); err != nil { + return err + } + + } + } + return nil +} + // NewSearchTransform returns a SearchTransform. func NewSearchTransform() *SearchTransform { r := &SearchTransform{} diff --git a/typedapi/types/security.go b/typedapi/types/security.go old mode 100755 new mode 100644 index 4246dc3056..ada805add4 --- a/typedapi/types/security.go +++ b/typedapi/types/security.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Security type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L425-L438 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L425-L438 type Security struct { Anonymous FeatureToggle `json:"anonymous"` ApiKeyService FeatureToggle `json:"api_key_service"` @@ -40,6 +50,120 @@ type Security struct { TokenService FeatureToggle `json:"token_service"` } +func (s *Security) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "anonymous": + if err := dec.Decode(&s.Anonymous); err != nil { + return err + } + + case "api_key_service": + if err := dec.Decode(&s.ApiKeyService); err != nil { + return err + } + + case "audit": + if err := dec.Decode(&s.Audit); err != nil { + return err + } + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "fips_140": + if err := dec.Decode(&s.Fips140); err != nil { + return err + } + + case "ipfilter": + if err := dec.Decode(&s.Ipfilter); err != nil { + return err + } + + case "operator_privileges": + if err := dec.Decode(&s.OperatorPrivileges); err != nil { + return err + } + + case "realms": + if s.Realms == nil { + s.Realms = make(map[string]XpackRealm, 0) + } + if err := dec.Decode(&s.Realms); err != nil { + return err + } + + case "role_mapping": + if s.RoleMapping == nil { + s.RoleMapping = make(map[string]XpackRoleMapping, 0) + } + if err := dec.Decode(&s.RoleMapping); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "ssl": + if err := dec.Decode(&s.Ssl); err != nil { + return err + } + + case "system_key": + if err := dec.Decode(&s.SystemKey); err != nil { + return err + } + + case "token_service": + if err := dec.Decode(&s.TokenService); err != nil { + return err + } + + } + } + return nil +} + // NewSecurity returns a Security. func NewSecurity() *Security { r := &Security{ diff --git a/typedapi/types/securityrealm.go b/typedapi/types/securityrealm.go old mode 100755 new mode 100644 index 81cbd1bf68..213222e12b --- a/typedapi/types/securityrealm.go +++ b/typedapi/types/securityrealm.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SecurityRealm type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RoleMappingRule.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RoleMappingRule.ts#L44-L46 type SecurityRealm struct { Name string `json:"name"` } +func (s *SecurityRealm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewSecurityRealm returns a SecurityRealm. func NewSecurityRealm() *SecurityRealm { r := &SecurityRealm{} diff --git a/typedapi/types/securityrolemapping.go b/typedapi/types/securityrolemapping.go old mode 100755 new mode 100644 index cd16081861..df246d93fb --- a/typedapi/types/securityrolemapping.go +++ b/typedapi/types/securityrolemapping.go @@ -16,23 +16,83 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // SecurityRoleMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/RoleMapping.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/RoleMapping.ts#L25-L31 type SecurityRoleMapping struct { - Enabled bool `json:"enabled"` - Metadata map[string]json.RawMessage `json:"metadata"` - RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` - Roles []string `json:"roles"` - Rules RoleMappingRule `json:"rules"` + Enabled bool `json:"enabled"` + Metadata Metadata `json:"metadata"` + RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` + Roles []string `json:"roles"` + Rules RoleMappingRule `json:"rules"` +} + +func (s *SecurityRoleMapping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "role_templates": + if err := dec.Decode(&s.RoleTemplates); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "rules": + if err := dec.Decode(&s.Rules); err != nil { + return err + } + + } + } + return nil } // NewSecurityRoleMapping returns a SecurityRoleMapping. diff --git a/typedapi/types/securityroles.go b/typedapi/types/securityroles.go old mode 100755 new mode 100644 index 98bf9052ea..5dd481af7c --- a/typedapi/types/securityroles.go +++ b/typedapi/types/securityroles.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
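SecurityRoleMapping above switches its metadata field to the shared Metadata type and gains a lenient decoder, so flags like enabled also decode from quoted strings. A sketch; the payload and main wrapper are invented, and it assumes the typedapi/types package from this change:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Invented role-mapping payload; "enabled" arrives as a quoted string,
	// which the new decoder still accepts.
	payload := []byte(`{
		"enabled": "true",
		"roles": ["superuser"],
		"metadata": {"owner": "ops"}
	}`)

	var m types.SecurityRoleMapping
	if err := json.Unmarshal(payload, &m); err != nil {
		panic(err)
	}
	fmt.Println(m.Enabled, m.Roles)
}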
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SecurityRoles type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L290-L294 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L290-L294 type SecurityRoles struct { Dls SecurityRolesDls `json:"dls"` File SecurityRolesFile `json:"file"` diff --git a/typedapi/types/securityrolesdls.go b/typedapi/types/securityrolesdls.go old mode 100755 new mode 100644 index 1c56c86eb4..66c7b1588f --- a/typedapi/types/securityrolesdls.go +++ b/typedapi/types/securityrolesdls.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SecurityRolesDls type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L302-L304 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L302-L304 type SecurityRolesDls struct { BitSetCache SecurityRolesDlsBitSetCache `json:"bit_set_cache"` } diff --git a/typedapi/types/securityrolesdlsbitsetcache.go b/typedapi/types/securityrolesdlsbitsetcache.go old mode 100755 new mode 100644 index 7bd3a0be5e..139bb0ee82 --- a/typedapi/types/securityrolesdlsbitsetcache.go +++ b/typedapi/types/securityrolesdlsbitsetcache.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SecurityRolesDlsBitSetCache type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L306-L310 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L306-L310 type SecurityRolesDlsBitSetCache struct { Count int `json:"count"` Memory ByteSize `json:"memory,omitempty"` MemoryInBytes uint64 `json:"memory_in_bytes"` } +func (s *SecurityRolesDlsBitSetCache) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "memory": + if err := dec.Decode(&s.Memory); err != nil { + return err + } + + case "memory_in_bytes": + if err := dec.Decode(&s.MemoryInBytes); err != nil { + return err + } + + } + } + return nil +} + // NewSecurityRolesDlsBitSetCache returns a SecurityRolesDlsBitSetCache. func NewSecurityRolesDlsBitSetCache() *SecurityRolesDlsBitSetCache { r := &SecurityRolesDlsBitSetCache{} diff --git a/typedapi/types/securityrolesfile.go b/typedapi/types/securityrolesfile.go old mode 100755 new mode 100644 index 3925a83bec..070ed5f341 --- a/typedapi/types/securityrolesfile.go +++ b/typedapi/types/securityrolesfile.go @@ -16,19 +16,92 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SecurityRolesFile type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L312-L316 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L312-L316 type SecurityRolesFile struct { Dls bool `json:"dls"` Fls bool `json:"fls"` Size int64 `json:"size"` } +func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "dls": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Dls = value + case bool: + s.Dls = v + } + + case "fls": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Fls = value + case bool: + s.Fls = v + } + + case "size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Size = value + case float64: + f := int64(v) + s.Size = f + } + + } + } + return nil +} + // NewSecurityRolesFile returns a SecurityRolesFile. 
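The SecurityRolesFile decoder just above applies the same leniency across a whole struct. A small test-style sketch of what it now tolerates; the values are invented and the test assumes the typedapi/types package from this change:

package example

import (
	"encoding/json"
	"testing"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// TestSecurityRolesFileLenient checks that booleans and integers decode
// from quoted strings as well as from native JSON scalars.
func TestSecurityRolesFileLenient(t *testing.T) {
	payload := []byte(`{"dls": "true", "fls": false, "size": "42"}`)

	var f types.SecurityRolesFile
	if err := json.Unmarshal(payload, &f); err != nil {
		t.Fatal(err)
	}
	if !f.Dls || f.Fls || f.Size != 42 {
		t.Fatalf("unexpected decode: %+v", f)
	}
}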
func NewSecurityRolesFile() *SecurityRolesFile { r := &SecurityRolesFile{} diff --git a/typedapi/types/securityrolesnative.go b/typedapi/types/securityrolesnative.go old mode 100755 new mode 100644 index 84a3eab8cb..b4b9e12d87 --- a/typedapi/types/securityrolesnative.go +++ b/typedapi/types/securityrolesnative.go @@ -16,19 +16,92 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SecurityRolesNative type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L296-L300 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L296-L300 type SecurityRolesNative struct { Dls bool `json:"dls"` Fls bool `json:"fls"` Size int64 `json:"size"` } +func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "dls": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Dls = value + case bool: + s.Dls = v + } + + case "fls": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Fls = value + case bool: + s.Fls = v + } + + case "size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Size = value + case float64: + f := int64(v) + s.Size = f + } + + } + } + return nil +} + // NewSecurityRolesNative returns a SecurityRolesNative. func NewSecurityRolesNative() *SecurityRolesNative { r := &SecurityRolesNative{} diff --git a/typedapi/types/segment.go b/typedapi/types/segment.go old mode 100755 new mode 100644 index 1bbe7994c5..fa93ebe20f --- a/typedapi/types/segment.go +++ b/typedapi/types/segment.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Segment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/segments/types.ts#L28-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/segments/types.ts#L28-L38 type Segment struct { Attributes map[string]string `json:"attributes"` Committed bool `json:"committed"` @@ -35,6 +45,143 @@ type Segment struct { Version string `json:"version"` } +func (s *Segment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "committed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Committed = value + case bool: + s.Committed = v + } + + case "compound": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Compound = value + case bool: + s.Compound = v + } + + case "deleted_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DeletedDocs = value + case float64: + f := int64(v) + s.DeletedDocs = f + } + + case "generation": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Generation = value + case float64: + f := int(v) + s.Generation = f + } + + case "num_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumDocs = value + case float64: + f := int64(v) + s.NumDocs = f + } + + case "search": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Search = value + case bool: + s.Search = v + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.SizeInBytes = f + case float64: + f := Float64(v) + s.SizeInBytes = f + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSegment returns a Segment. func NewSegment() *Segment { r := &Segment{ diff --git a/typedapi/types/segmentsrecord.go b/typedapi/types/segmentsrecord.go old mode 100755 new mode 100644 index e1d7fc2140..0abf30a9f4 --- a/typedapi/types/segmentsrecord.go +++ b/typedapi/types/segmentsrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SegmentsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/segments/types.ts#L22-L96 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/segments/types.ts#L22-L96 type SegmentsRecord struct { // Committed is segment committed Committed *string `json:"committed,omitempty"` @@ -56,6 +64,131 @@ type SegmentsRecord struct { Version *string `json:"version,omitempty"` } +func (s *SegmentsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "committed", "ic", "isCommitted": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Committed = &o + + case "compound", "ico", "isCompound": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Compound = &o + + case "docs.count", "dc", "docsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocsCount = &o + + case "docs.deleted", "dd", "docsDeleted": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocsDeleted = &o + + case "generation", "g", "gen": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Generation = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index", "i", "idx": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "ip": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Ip = &o + + case "prirep", "p", "pr", "primaryOrReplica": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Prirep = &o + + case "searchable", "is", "isSearchable": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Searchable = &o + + case "segment", "seg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Segment = &o + + case "shard", "s", "sh": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Shard = &o + + case "size", "si": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + case "size.memory", "sm", "sizeMemory": + if err := dec.Decode(&s.SizeMemory); err != nil { + return err + } + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSegmentsRecord returns a SegmentsRecord. func NewSegmentsRecord() *SegmentsRecord { r := &SegmentsRecord{} diff --git a/typedapi/types/segmentsstats.go b/typedapi/types/segmentsstats.go old mode 100755 new mode 100644 index da90233c12..34bf3340d6 --- a/typedapi/types/segmentsstats.go +++ b/typedapi/types/segmentsstats.go @@ -16,38 +16,322 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
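SegmentsRecord above now resolves the cat API's short column aliases ("dc", "seg", "p", ...) to the same struct fields as the long names. A sketch; the payload and main wrapper are invented, and it assumes the typedapi/types package from this change:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Invented row using the short cat column names instead of the long ones.
	payload := []byte(`{"i": "logs-2023", "seg": "_0", "dc": "128", "p": "p"}`)

	var rec types.SegmentsRecord
	if err := json.Unmarshal(payload, &rec); err != nil {
		panic(err)
	}
	// Aliased columns populate the same pointer fields as their long counterparts.
	fmt.Println(rec.Index != nil, rec.Segment != nil, rec.DocsCount != nil, rec.Prirep != nil)
}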
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SegmentsStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L206-L231 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L206-L231 type SegmentsStats struct { Count int `json:"count"` DocValuesMemory ByteSize `json:"doc_values_memory,omitempty"` - DocValuesMemoryInBytes int `json:"doc_values_memory_in_bytes"` + DocValuesMemoryInBytes int64 `json:"doc_values_memory_in_bytes"` FileSizes map[string]ShardFileSizeInfo `json:"file_sizes"` FixedBitSet ByteSize `json:"fixed_bit_set,omitempty"` - FixedBitSetMemoryInBytes int `json:"fixed_bit_set_memory_in_bytes"` - IndexWriterMaxMemoryInBytes *int `json:"index_writer_max_memory_in_bytes,omitempty"` + FixedBitSetMemoryInBytes int64 `json:"fixed_bit_set_memory_in_bytes"` + IndexWriterMaxMemoryInBytes *int64 `json:"index_writer_max_memory_in_bytes,omitempty"` IndexWriterMemory ByteSize `json:"index_writer_memory,omitempty"` - IndexWriterMemoryInBytes int `json:"index_writer_memory_in_bytes"` + IndexWriterMemoryInBytes int64 `json:"index_writer_memory_in_bytes"` MaxUnsafeAutoIdTimestamp int64 `json:"max_unsafe_auto_id_timestamp"` Memory ByteSize `json:"memory,omitempty"` - MemoryInBytes int `json:"memory_in_bytes"` + MemoryInBytes int64 `json:"memory_in_bytes"` NormsMemory ByteSize `json:"norms_memory,omitempty"` - NormsMemoryInBytes int `json:"norms_memory_in_bytes"` + NormsMemoryInBytes int64 `json:"norms_memory_in_bytes"` PointsMemory ByteSize `json:"points_memory,omitempty"` - PointsMemoryInBytes int `json:"points_memory_in_bytes"` - StoredFieldsMemoryInBytes int `json:"stored_fields_memory_in_bytes"` + PointsMemoryInBytes int64 `json:"points_memory_in_bytes"` + StoredFieldsMemoryInBytes int64 `json:"stored_fields_memory_in_bytes"` StoredMemory ByteSize `json:"stored_memory,omitempty"` - TermVectorsMemoryInBytes int `json:"term_vectors_memory_in_bytes"` + TermVectorsMemoryInBytes int64 `json:"term_vectors_memory_in_bytes"` TermVectoryMemory ByteSize `json:"term_vectory_memory,omitempty"` TermsMemory ByteSize `json:"terms_memory,omitempty"` - TermsMemoryInBytes int `json:"terms_memory_in_bytes"` + TermsMemoryInBytes int64 `json:"terms_memory_in_bytes"` VersionMapMemory ByteSize `json:"version_map_memory,omitempty"` - VersionMapMemoryInBytes int `json:"version_map_memory_in_bytes"` + VersionMapMemoryInBytes int64 `json:"version_map_memory_in_bytes"` +} + +func (s *SegmentsStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = value + case float64: + f := int(v) + s.Count = f + } + + case "doc_values_memory": + if err := dec.Decode(&s.DocValuesMemory); err != nil { + return err + } + + case "doc_values_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 
10, 64) + if err != nil { + return err + } + s.DocValuesMemoryInBytes = value + case float64: + f := int64(v) + s.DocValuesMemoryInBytes = f + } + + case "file_sizes": + if s.FileSizes == nil { + s.FileSizes = make(map[string]ShardFileSizeInfo, 0) + } + if err := dec.Decode(&s.FileSizes); err != nil { + return err + } + + case "fixed_bit_set": + if err := dec.Decode(&s.FixedBitSet); err != nil { + return err + } + + case "fixed_bit_set_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FixedBitSetMemoryInBytes = value + case float64: + f := int64(v) + s.FixedBitSetMemoryInBytes = f + } + + case "index_writer_max_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexWriterMaxMemoryInBytes = &value + case float64: + f := int64(v) + s.IndexWriterMaxMemoryInBytes = &f + } + + case "index_writer_memory": + if err := dec.Decode(&s.IndexWriterMemory); err != nil { + return err + } + + case "index_writer_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexWriterMemoryInBytes = value + case float64: + f := int64(v) + s.IndexWriterMemoryInBytes = f + } + + case "max_unsafe_auto_id_timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxUnsafeAutoIdTimestamp = value + case float64: + f := int64(v) + s.MaxUnsafeAutoIdTimestamp = f + } + + case "memory": + if err := dec.Decode(&s.Memory); err != nil { + return err + } + + case "memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemoryInBytes = value + case float64: + f := int64(v) + s.MemoryInBytes = f + } + + case "norms_memory": + if err := dec.Decode(&s.NormsMemory); err != nil { + return err + } + + case "norms_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NormsMemoryInBytes = value + case float64: + f := int64(v) + s.NormsMemoryInBytes = f + } + + case "points_memory": + if err := dec.Decode(&s.PointsMemory); err != nil { + return err + } + + case "points_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PointsMemoryInBytes = value + case float64: + f := int64(v) + s.PointsMemoryInBytes = f + } + + case "stored_fields_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.StoredFieldsMemoryInBytes = value + case float64: + f := int64(v) + s.StoredFieldsMemoryInBytes = f + } + + case "stored_memory": + if err := dec.Decode(&s.StoredMemory); err != nil { + return err + } + + case "term_vectors_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TermVectorsMemoryInBytes = value 
+ case float64: + f := int64(v) + s.TermVectorsMemoryInBytes = f + } + + case "term_vectory_memory": + if err := dec.Decode(&s.TermVectoryMemory); err != nil { + return err + } + + case "terms_memory": + if err := dec.Decode(&s.TermsMemory); err != nil { + return err + } + + case "terms_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TermsMemoryInBytes = value + case float64: + f := int64(v) + s.TermsMemoryInBytes = f + } + + case "version_map_memory": + if err := dec.Decode(&s.VersionMapMemory); err != nil { + return err + } + + case "version_map_memory_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.VersionMapMemoryInBytes = value + case float64: + f := int64(v) + s.VersionMapMemoryInBytes = f + } + + } + } + return nil } // NewSegmentsStats returns a SegmentsStats. diff --git a/typedapi/types/serialdifferencingaggregation.go b/typedapi/types/serialdifferencingaggregation.go old mode 100755 new mode 100644 index cab0224416..b626e17b6e --- a/typedapi/types/serialdifferencingaggregation.go +++ b/typedapi/types/serialdifferencingaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,23 +27,26 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SerialDifferencingAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L280-L282 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L280-L282 type SerialDifferencingAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Lag *int `json:"lag,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Lag *int `json:"lag,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,9 +66,12 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -73,8 +79,19 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { } case "lag": - if err := dec.Decode(&s.Lag); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Lag = &value + case float64: + f := int(v) + s.Lag = &f } case "meta": @@ -83,9 +100,12 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/serializedclusterstate.go b/typedapi/types/serializedclusterstate.go old mode 100755 new mode 100644 index 23e8303350..3460c48375 --- a/typedapi/types/serializedclusterstate.go +++ b/typedapi/types/serializedclusterstate.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SerializedClusterState type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L101-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L101-L104 type SerializedClusterState struct { Diffs *SerializedClusterStateDetail `json:"diffs,omitempty"` FullStates *SerializedClusterStateDetail `json:"full_states,omitempty"` diff --git a/typedapi/types/serializedclusterstatedetail.go b/typedapi/types/serializedclusterstatedetail.go old mode 100755 new mode 100644 index 9430e0234d..805e0ebbc7 --- a/typedapi/types/serializedclusterstatedetail.go +++ b/typedapi/types/serializedclusterstatedetail.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SerializedClusterStateDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L106-L112 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L106-L112 type SerializedClusterStateDetail struct { CompressedSize *string `json:"compressed_size,omitempty"` CompressedSizeInBytes *int64 `json:"compressed_size_in_bytes,omitempty"` @@ -31,6 +41,87 @@ type SerializedClusterStateDetail struct { UncompressedSizeInBytes *int64 `json:"uncompressed_size_in_bytes,omitempty"` } +func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compressed_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CompressedSize = &o + + case "compressed_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CompressedSizeInBytes = &value + case float64: + f := int64(v) + s.CompressedSizeInBytes = &f + } + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "uncompressed_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UncompressedSize = &o + + case "uncompressed_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UncompressedSizeInBytes = &value + case float64: + f := int64(v) + s.UncompressedSizeInBytes = &f + } + + } + } + return nil +} + // NewSerializedClusterStateDetail returns a SerializedClusterStateDetail. func NewSerializedClusterStateDetail() *SerializedClusterStateDetail { r := &SerializedClusterStateDetail{} diff --git a/typedapi/types/servicetoken.go b/typedapi/types/servicetoken.go old mode 100755 new mode 100644 index b5f35ebc7e..48931716e0 --- a/typedapi/types/servicetoken.go +++ b/typedapi/types/servicetoken.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ServiceToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/create_service_token/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/create_service_token/types.ts#L22-L25 type ServiceToken struct { Name string `json:"name"` Value string `json:"value"` } +func (s *ServiceToken) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewServiceToken returns a ServiceToken. func NewServiceToken() *ServiceToken { r := &ServiceToken{} diff --git a/typedapi/types/setprocessor.go b/typedapi/types/setprocessor.go old mode 100755 new mode 100644 index 5bea5e29f4..295ee6405b --- a/typedapi/types/setprocessor.go +++ b/typedapi/types/setprocessor.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // SetProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L329-L336 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L329-L336 type SetProcessor struct { CopyFrom *string `json:"copy_from,omitempty"` Description *string `json:"description,omitempty"` @@ -41,6 +47,120 @@ type SetProcessor struct { Value json.RawMessage `json:"value,omitempty"` } +func (s *SetProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "copy_from": + if err := dec.Decode(&s.CopyFrom); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_empty_value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreEmptyValue = &value + case bool: + s.IgnoreEmptyValue = &v + } + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "media_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MediaType = &o + + case "on_failure": + if err := 
dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "override": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Override = &value + case bool: + s.Override = &v + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + } + } + return nil +} + // NewSetProcessor returns a SetProcessor. func NewSetProcessor() *SetProcessor { r := &SetProcessor{} diff --git a/typedapi/types/setsecurityuserprocessor.go b/typedapi/types/setsecurityuserprocessor.go old mode 100755 new mode 100644 index d785ecfaae..ed9ec7522d --- a/typedapi/types/setsecurityuserprocessor.go +++ b/typedapi/types/setsecurityuserprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SetSecurityUserProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L338-L341 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L338-L341 type SetSecurityUserProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -33,6 +43,79 @@ type SetSecurityUserProcessor struct { Tag *string `json:"tag,omitempty"` } +func (s *SetSecurityUserProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "properties": + if err := dec.Decode(&s.Properties); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + } + } + return nil +} + // NewSetSecurityUserProcessor returns a SetSecurityUserProcessor. func NewSetSecurityUserProcessor() *SetSecurityUserProcessor { r := &SetSecurityUserProcessor{} diff --git a/typedapi/types/settings.go b/typedapi/types/settings.go old mode 100755 new mode 100644 index dc57e72962..b9f07a727c --- a/typedapi/types/settings.go +++ b/typedapi/types/settings.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Settings type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L98-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L98-L143 type Settings struct { // AlignCheckpoints Specifies whether the transform checkpoint ranges should be optimized for // performance. Such optimization can align @@ -59,6 +69,114 @@ type Settings struct { Unattended *bool `json:"unattended,omitempty"` } +func (s *Settings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "align_checkpoints": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AlignCheckpoints = &value + case bool: + s.AlignCheckpoints = &v + } + + case "dates_as_epoch_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DatesAsEpochMillis = &value + case bool: + s.DatesAsEpochMillis = &v + } + + case "deduce_mappings": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DeduceMappings = &value + case bool: + s.DeduceMappings = &v + } + + case "docs_per_second": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.DocsPerSecond = &f + case float64: + f := float32(v) + s.DocsPerSecond = &f + } + + case "max_page_search_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxPageSearchSize = &value + case float64: + f := int(v) + s.MaxPageSearchSize = &f + } + + case "unattended": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Unattended = &value + case bool: + s.Unattended = &v + } + + } + } + return nil +} + // NewSettings returns a Settings. func NewSettings() *Settings { r := &Settings{} diff --git a/typedapi/types/settingsanalyze.go b/typedapi/types/settingsanalyze.go old mode 100755 new mode 100644 index 9938563a7f..23ee053227 --- a/typedapi/types/settingsanalyze.go +++ b/typedapi/types/settingsanalyze.go @@ -16,15 +16,48 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SettingsAnalyze type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L226-L229 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L226-L229 type SettingsAnalyze struct { - MaxTokenCount *int `json:"max_token_count,omitempty"` + MaxTokenCount Stringifiedinteger `json:"max_token_count,omitempty"` +} + +func (s *SettingsAnalyze) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_token_count": + if err := dec.Decode(&s.MaxTokenCount); err != nil { + return err + } + + } + } + return nil } // NewSettingsAnalyze returns a SettingsAnalyze. diff --git a/typedapi/types/settingshighlight.go b/typedapi/types/settingshighlight.go old mode 100755 new mode 100644 index 39a38abd0c..2f16d36218 --- a/typedapi/types/settingshighlight.go +++ b/typedapi/types/settingshighlight.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SettingsHighlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L221-L224 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L221-L224 type SettingsHighlight struct { MaxAnalyzedOffset *int `json:"max_analyzed_offset,omitempty"` } +func (s *SettingsHighlight) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_analyzed_offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAnalyzedOffset = &value + case float64: + f := int(v) + s.MaxAnalyzedOffset = &f + } + + } + } + return nil +} + // NewSettingsHighlight returns a SettingsHighlight. func NewSettingsHighlight() *SettingsHighlight { r := &SettingsHighlight{} diff --git a/typedapi/types/settingsquerystring.go b/typedapi/types/settingsquerystring.go old mode 100755 new mode 100644 index 9209c1c642..039cc50e32 --- a/typedapi/types/settingsquerystring.go +++ b/typedapi/types/settingsquerystring.go @@ -16,15 +16,48 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SettingsQueryString type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L241-L243 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L241-L243 type SettingsQueryString struct { - Lenient bool `json:"lenient"` + Lenient Stringifiedboolean `json:"lenient"` +} + +func (s *SettingsQueryString) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "lenient": + if err := dec.Decode(&s.Lenient); err != nil { + return err + } + + } + } + return nil } // NewSettingsQueryString returns a SettingsQueryString. diff --git a/typedapi/types/settingssearch.go b/typedapi/types/settingssearch.go old mode 100755 new mode 100644 index 0900f083b2..a17b688e15 --- a/typedapi/types/settingssearch.go +++ b/typedapi/types/settingssearch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SettingsSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L231-L234 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L231-L234 type SettingsSearch struct { Idle *SearchIdle `json:"idle,omitempty"` Slowlog *SlowlogSettings `json:"slowlog,omitempty"` diff --git a/typedapi/types/settingssimilarity.go b/typedapi/types/settingssimilarity.go old mode 100755 new mode 100644 index 956f5f5729..6a461e9e92 --- a/typedapi/types/settingssimilarity.go +++ b/typedapi/types/settingssimilarity.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SettingsSimilarity type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L170-L178 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L170-L178 type SettingsSimilarity struct { Bm25 *SettingsSimilarityBm25 `json:"bm25,omitempty"` Dfi *SettingsSimilarityDfi `json:"dfi,omitempty"` diff --git a/typedapi/types/settingssimilaritybm25.go b/typedapi/types/settingssimilaritybm25.go old mode 100755 new mode 100644 index 85a12d4638..608ee72da9 --- a/typedapi/types/settingssimilaritybm25.go +++ b/typedapi/types/settingssimilaritybm25.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SettingsSimilarityBm25 type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L180-L185 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L180-L185 type SettingsSimilarityBm25 struct { B Float64 `json:"b"` DiscountOverlaps bool `json:"discount_overlaps"` @@ -30,6 +40,77 @@ type SettingsSimilarityBm25 struct { Type string `json:"type,omitempty"` } +func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "b": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.B = f + case float64: + f := Float64(v) + s.B = f + } + + case "discount_overlaps": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DiscountOverlaps = value + case bool: + s.DiscountOverlaps = v + } + + case "k1": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.K1 = f + case float64: + f := Float64(v) + s.K1 = f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewSettingsSimilarityBm25 returns a SettingsSimilarityBm25. func NewSettingsSimilarityBm25() *SettingsSimilarityBm25 { r := &SettingsSimilarityBm25{} diff --git a/typedapi/types/settingssimilaritydfi.go b/typedapi/types/settingssimilaritydfi.go old mode 100755 new mode 100644 index bef5c091b0..9428416442 --- a/typedapi/types/settingssimilaritydfi.go +++ b/typedapi/types/settingssimilaritydfi.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // SettingsSimilarityDfi type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L187-L190 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L187-L190 type SettingsSimilarityDfi struct { IndependenceMeasure dfiindependencemeasure.DFIIndependenceMeasure `json:"independence_measure"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/settingssimilaritydfr.go b/typedapi/types/settingssimilaritydfr.go old mode 100755 new mode 100644 index 0386c09285..a09f6c3152 --- a/typedapi/types/settingssimilaritydfr.go +++ b/typedapi/types/settingssimilaritydfr.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,7 +28,7 @@ import ( // SettingsSimilarityDfr type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L192-L197 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L192-L197 type SettingsSimilarityDfr struct { AfterEffect dfraftereffect.DFRAfterEffect `json:"after_effect"` BasicModel dfrbasicmodel.DFRBasicModel `json:"basic_model"` diff --git a/typedapi/types/settingssimilarityib.go b/typedapi/types/settingssimilarityib.go old mode 100755 new mode 100644 index db5b5baaee..bf8ee57e0c --- a/typedapi/types/settingssimilarityib.go +++ b/typedapi/types/settingssimilarityib.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,7 +28,7 @@ import ( // SettingsSimilarityIb type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L199-L204 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L199-L204 type SettingsSimilarityIb struct { Distribution ibdistribution.IBDistribution `json:"distribution"` Lambda iblambda.IBLambda `json:"lambda"` diff --git a/typedapi/types/settingssimilaritylmd.go b/typedapi/types/settingssimilaritylmd.go old mode 100755 new mode 100644 index fd7a175091..965ab28349 --- a/typedapi/types/settingssimilaritylmd.go +++ b/typedapi/types/settingssimilaritylmd.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SettingsSimilarityLmd type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L206-L209 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L206-L209 type SettingsSimilarityLmd struct { Mu int `json:"mu"` Type string `json:"type,omitempty"` } +func (s *SettingsSimilarityLmd) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mu": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Mu = value + case float64: + f := int(v) + s.Mu = f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewSettingsSimilarityLmd returns a SettingsSimilarityLmd. func NewSettingsSimilarityLmd() *SettingsSimilarityLmd { r := &SettingsSimilarityLmd{} diff --git a/typedapi/types/settingssimilaritylmj.go b/typedapi/types/settingssimilaritylmj.go old mode 100755 new mode 100644 index 55bba055aa..7043ae16cf --- a/typedapi/types/settingssimilaritylmj.go +++ b/typedapi/types/settingssimilaritylmj.go @@ -16,18 +16,69 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SettingsSimilarityLmj type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L211-L214 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L211-L214 type SettingsSimilarityLmj struct { Lambda Float64 `json:"lambda"` Type string `json:"type,omitempty"` } +func (s *SettingsSimilarityLmj) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "lambda": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Lambda = f + case float64: + f := Float64(v) + s.Lambda = f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewSettingsSimilarityLmj returns a SettingsSimilarityLmj. func NewSettingsSimilarityLmj() *SettingsSimilarityLmj { r := &SettingsSimilarityLmj{} diff --git a/typedapi/types/settingssimilarityscriptedtfidf.go b/typedapi/types/settingssimilarityscriptedtfidf.go old mode 100755 new mode 100644 index af177ce58b..71fa1a7df6 --- a/typedapi/types/settingssimilarityscriptedtfidf.go +++ b/typedapi/types/settingssimilarityscriptedtfidf.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SettingsSimilarityScriptedTfidf type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L216-L219 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L216-L219 type SettingsSimilarityScriptedTfidf struct { Script Script `json:"script"` Type string `json:"type,omitempty"` } +func (s *SettingsSimilarityScriptedTfidf) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewSettingsSimilarityScriptedTfidf returns a SettingsSimilarityScriptedTfidf. func NewSettingsSimilarityScriptedTfidf() *SettingsSimilarityScriptedTfidf { r := &SettingsSimilarityScriptedTfidf{} diff --git a/typedapi/types/shapefieldquery.go b/typedapi/types/shapefieldquery.go old mode 100755 new mode 100644 index 0bd351cba0..373c1082ec --- a/typedapi/types/shapefieldquery.go +++ b/typedapi/types/shapefieldquery.go @@ -16,25 +16,64 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geoshaperelation" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ShapeFieldQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L183-L187 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L183-L187 type ShapeFieldQuery struct { IndexedShape *FieldLookup `json:"indexed_shape,omitempty"` Relation *geoshaperelation.GeoShapeRelation `json:"relation,omitempty"` Shape json.RawMessage `json:"shape,omitempty"` } +func (s *ShapeFieldQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indexed_shape": + if err := dec.Decode(&s.IndexedShape); err != nil { + return err + } + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return err + } + + case "shape": + if err := dec.Decode(&s.Shape); err != nil { + return err + } + + } + } + return nil +} + // NewShapeFieldQuery returns a ShapeFieldQuery. 
func NewShapeFieldQuery() *ShapeFieldQuery { r := &ShapeFieldQuery{} diff --git a/typedapi/types/shapeproperty.go b/typedapi/types/shapeproperty.go old mode 100755 new mode 100644 index 8f901dc551..bd51e82aa9 --- a/typedapi/types/shapeproperty.go +++ b/typedapi/types/shapeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ShapeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/geo.ts#L69-L81 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/geo.ts#L69-L81 type ShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -53,6 +55,7 @@ type ShapeProperty struct { } func (s *ShapeProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,18 +70,47 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { switch t { case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -87,6 +119,9 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -374,28 +409,60 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case 
"ignore_z_value": - if err := dec.Decode(&s.IgnoreZValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreZValue = &value + case bool: + s.IgnoreZValue = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -406,6 +473,9 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -693,20 +763,32 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/shapequery.go b/typedapi/types/shapequery.go old mode 100755 new mode 100644 index 769e2e4bd1..5b7c840b99 --- a/typedapi/types/shapequery.go +++ b/typedapi/types/shapequery.go @@ -16,23 +16,98 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ShapeQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/specialized.ts#L176-L181 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/specialized.ts#L176-L181 type ShapeQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` - ShapeQuery map[string]ShapeFieldQuery `json:"-"` + ShapeQuery map[string]ShapeFieldQuery `json:"ShapeQuery,omitempty"` +} + +func (s *ShapeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "ignore_unmapped": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreUnmapped = &value + case bool: + s.IgnoreUnmapped = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "ShapeQuery": + if s.ShapeQuery == nil { + s.ShapeQuery = make(map[string]ShapeFieldQuery, 0) + } + if err := dec.Decode(&s.ShapeQuery); err != nil { + return err + } + + default: + + } + } + return nil } // MarhsalJSON overrides marshalling for types with additional properties @@ -54,6 +129,7 @@ func (s ShapeQuery) MarshalJSON() ([]byte, error) { for key, value := range s.ShapeQuery { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "ShapeQuery") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/shardcommit.go b/typedapi/types/shardcommit.go old mode 100755 new mode 100644 index 20d5830342..b52b570365 --- a/typedapi/types/shardcommit.go +++ b/typedapi/types/shardcommit.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardCommit type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L103-L108 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L103-L108 type ShardCommit struct { Generation int `json:"generation"` Id string `json:"id"` @@ -30,6 +40,70 @@ type ShardCommit struct { UserData map[string]string `json:"user_data"` } +func (s *ShardCommit) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "generation": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Generation = value + case float64: + f := int(v) + s.Generation = f + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "num_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumDocs = value + case float64: + f := int64(v) + s.NumDocs = f + } + + case "user_data": + if s.UserData == nil { + s.UserData = make(map[string]string, 0) + } + if err := dec.Decode(&s.UserData); err != nil { + return err + } + + } + } + return nil +} + // NewShardCommit returns a ShardCommit. func NewShardCommit() *ShardCommit { r := &ShardCommit{ diff --git a/typedapi/types/shardfailure.go b/typedapi/types/shardfailure.go old mode 100755 new mode 100644 index d942730abd..9ecfc1a06d --- a/typedapi/types/shardfailure.go +++ b/typedapi/types/shardfailure.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardFailure type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Errors.ts#L50-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Errors.ts#L50-L56 type ShardFailure struct { Index *string `json:"index,omitempty"` Node *string `json:"node,omitempty"` @@ -31,6 +41,68 @@ type ShardFailure struct { Status *string `json:"status,omitempty"` } +func (s *ShardFailure) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = &o + + case "reason": + if err := dec.Decode(&s.Reason); err != nil { + return err + } + + case "shard": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Shard = value + case float64: + f := int(v) + s.Shard = f + } + + case "status": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Status = &o + + } + } + return nil +} + // NewShardFailure returns a ShardFailure. func NewShardFailure() *ShardFailure { r := &ShardFailure{} diff --git a/typedapi/types/shardfilesizeinfo.go b/typedapi/types/shardfilesizeinfo.go old mode 100755 new mode 100644 index 03cce57a16..68ed7b2d3d --- a/typedapi/types/shardfilesizeinfo.go +++ b/typedapi/types/shardfilesizeinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardFileSizeInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L115-L122 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L115-L122 type ShardFileSizeInfo struct { AverageSizeInBytes *int64 `json:"average_size_in_bytes,omitempty"` Count *int64 `json:"count,omitempty"` @@ -32,6 +42,109 @@ type ShardFileSizeInfo struct { SizeInBytes int64 `json:"size_in_bytes"` } +func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "average_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.AverageSizeInBytes = &value + case float64: + f := int64(v) + s.AverageSizeInBytes = &f + } + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = o + + case "max_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MaxSizeInBytes = &value + case float64: + f := int64(v) + s.MaxSizeInBytes = &f + } + + case "min_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinSizeInBytes = &value + case float64: + f := int64(v) + s.MinSizeInBytes = &f + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + } + } + return nil +} + // NewShardFileSizeInfo returns a ShardFileSizeInfo. func NewShardFileSizeInfo() *ShardFileSizeInfo { r := &ShardFileSizeInfo{} diff --git a/typedapi/types/shardhealthstats.go b/typedapi/types/shardhealthstats.go old mode 100755 new mode 100644 index 9692c19d28..c6eca8b221 --- a/typedapi/types/shardhealthstats.go +++ b/typedapi/types/shardhealthstats.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ShardHealthStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/health/types.ts#L36-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/health/types.ts#L36-L43 type ShardHealthStats struct { ActiveShards int `json:"active_shards"` InitializingShards int `json:"initializing_shards"` @@ -36,6 +44,109 @@ type ShardHealthStats struct { UnassignedShards int `json:"unassigned_shards"` } +func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ActiveShards = value + case float64: + f := int(v) + s.ActiveShards = f + } + + case "initializing_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InitializingShards = value + case float64: + f := int(v) + s.InitializingShards = f + } + + case "primary_active": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PrimaryActive = value + case bool: + s.PrimaryActive = v + } + + case "relocating_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RelocatingShards = value + case float64: + f := int(v) + s.RelocatingShards = f + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "unassigned_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.UnassignedShards = value + case float64: + f := int(v) + s.UnassignedShards = f + } + + } + } + return nil +} + // NewShardHealthStats returns a ShardHealthStats. func NewShardHealthStats() *ShardHealthStats { r := &ShardHealthStats{} diff --git a/typedapi/types/shardlease.go b/typedapi/types/shardlease.go old mode 100755 new mode 100644 index 20c2d711b1..b3da413115 --- a/typedapi/types/shardlease.go +++ b/typedapi/types/shardlease.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardLease type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L124-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L124-L129 type ShardLease struct { Id string `json:"id"` RetainingSeqNo int64 `json:"retaining_seq_no"` @@ -30,6 +40,59 @@ type ShardLease struct { Timestamp int64 `json:"timestamp"` } +func (s *ShardLease) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "retaining_seq_no": + if err := dec.Decode(&s.RetainingSeqNo); err != nil { + return err + } + + case "source": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Source = o + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = value + case float64: + f := int64(v) + s.Timestamp = f + } + + } + } + return nil +} + // NewShardLease returns a ShardLease. func NewShardLease() *ShardLease { r := &ShardLease{} diff --git a/typedapi/types/shardmigrationstatus.go b/typedapi/types/shardmigrationstatus.go old mode 100755 new mode 100644 index b717ad84f8..db29576013 --- a/typedapi/types/shardmigrationstatus.go +++ b/typedapi/types/shardmigrationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // ShardMigrationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L52-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L52-L54 type ShardMigrationStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/shardpath.go b/typedapi/types/shardpath.go old mode 100755 new mode 100644 index 7b551eb15d..5328710078 --- a/typedapi/types/shardpath.go +++ b/typedapi/types/shardpath.go @@ -16,19 +16,79 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardPath type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L131-L135 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L131-L135 type ShardPath struct { DataPath string `json:"data_path"` IsCustomDataPath bool `json:"is_custom_data_path"` StatePath string `json:"state_path"` } +func (s *ShardPath) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "data_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataPath = o + + case "is_custom_data_path": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IsCustomDataPath = value + case bool: + s.IsCustomDataPath = v + } + + case "state_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StatePath = o + + } + } + return nil +} + // NewShardPath returns a ShardPath. func NewShardPath() *ShardPath { r := &ShardPath{} diff --git a/typedapi/types/shardprofile.go b/typedapi/types/shardprofile.go old mode 100755 new mode 100644 index 949e4b822f..18274c99f1 --- a/typedapi/types/shardprofile.go +++ b/typedapi/types/shardprofile.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ShardProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/profile.ts#L132-L137 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/profile.ts#L132-L137 type ShardProfile struct { Aggregations []AggregationProfile `json:"aggregations"` Fetch *FetchProfile `json:"fetch,omitempty"` diff --git a/typedapi/types/shardquerycache.go b/typedapi/types/shardquerycache.go old mode 100755 new mode 100644 index 904cd2d7f1..d4585ceaaf --- a/typedapi/types/shardquerycache.go +++ b/typedapi/types/shardquerycache.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardQueryCache type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L137-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L137-L145 type ShardQueryCache struct { CacheCount int64 `json:"cache_count"` CacheSize int64 `json:"cache_size"` @@ -33,6 +43,131 @@ type ShardQueryCache struct { TotalCount int64 `json:"total_count"` } +func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CacheCount = value + case float64: + f := int64(v) + s.CacheCount = f + } + + case "cache_size": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CacheSize = value + case float64: + f := int64(v) + s.CacheSize = f + } + + case "evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Evictions = value + case float64: + f := int64(v) + s.Evictions = f + } + + case "hit_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.HitCount = value + case float64: + f := int64(v) + s.HitCount = f + } + + case "memory_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MemorySizeInBytes = value + case float64: + f := int64(v) + s.MemorySizeInBytes = f + } + + case "miss_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MissCount = value + case float64: + f := int64(v) + s.MissCount = f + } + + case "total_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalCount = value + case float64: + f := int64(v) + s.TotalCount = f + } + + } + } + return nil +} + // NewShardQueryCache returns a ShardQueryCache. func NewShardQueryCache() *ShardQueryCache { r := &ShardQueryCache{} diff --git a/typedapi/types/shardrecovery.go b/typedapi/types/shardrecovery.go old mode 100755 new mode 100644 index ca93c9d9f3..fbfeb4ac41 --- a/typedapi/types/shardrecovery.go +++ b/typedapi/types/shardrecovery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardRecovery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L118-L135 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L118-L135 type ShardRecovery struct { Id int64 `json:"id"` Index RecoveryIndexStatus `json:"index"` @@ -42,6 +52,131 @@ type ShardRecovery struct { VerifyIndex VerifyIndex `json:"verify_index"` } +func (s *ShardRecovery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Id = value + case float64: + f := int64(v) + s.Id = f + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "primary": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Primary = value + case bool: + s.Primary = v + } + + case "source": + if err := dec.Decode(&s.Source); err != nil { + return err + } + + case "stage": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Stage = o + + case "start": + if err := dec.Decode(&s.Start); err != nil { + return err + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "stop_time": + if err := dec.Decode(&s.StopTime); err != nil { + return err + } + + case "stop_time_in_millis": + if err := dec.Decode(&s.StopTimeInMillis); err != nil { + return err + } + + case "target": + if err := dec.Decode(&s.Target); err != nil { + return err + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + case "translog": + if err := dec.Decode(&s.Translog); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + case "verify_index": + if err := dec.Decode(&s.VerifyIndex); err != nil { + return err + } + + } + } + return nil +} + // NewShardRecovery returns a ShardRecovery. func NewShardRecovery() *ShardRecovery { r := &ShardRecovery{} diff --git a/typedapi/types/shardretentionleases.go b/typedapi/types/shardretentionleases.go old mode 100755 new mode 100644 index cbf495e684..c106718ed7 --- a/typedapi/types/shardretentionleases.go +++ b/typedapi/types/shardretentionleases.go @@ -16,19 +16,74 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardRetentionLeases type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L147-L151 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L147-L151 type ShardRetentionLeases struct { Leases []ShardLease `json:"leases"` PrimaryTerm int64 `json:"primary_term"` Version int64 `json:"version"` } +func (s *ShardRetentionLeases) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "leases": + if err := dec.Decode(&s.Leases); err != nil { + return err + } + + case "primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryTerm = value + case float64: + f := int64(v) + s.PrimaryTerm = f + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewShardRetentionLeases returns a ShardRetentionLeases. func NewShardRetentionLeases() *ShardRetentionLeases { r := &ShardRetentionLeases{} diff --git a/typedapi/types/shardrouting.go b/typedapi/types/shardrouting.go old mode 100755 new mode 100644 index a5a1458cd7..009b514dc9 --- a/typedapi/types/shardrouting.go +++ b/typedapi/types/shardrouting.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/shardroutingstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // ShardRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L153-L158 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L153-L158 type ShardRouting struct { Node string `json:"node"` Primary bool `json:"primary"` @@ -34,6 +42,61 @@ type ShardRouting struct { State shardroutingstate.ShardRoutingState `json:"state"` } +func (s *ShardRouting) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "primary": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Primary = value + case bool: + s.Primary = v + } + + case "relocating_node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RelocatingNode = o + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + } + } + return nil +} + // NewShardRouting returns a ShardRouting. 
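The ShardRetentionLeases decoder above mixes both styles: `leases` is handed to the standard decoder (which in turn runs ShardLease's custom UnmarshalJSON per element), while `primary_term` takes the tolerant string-or-number path. A rough sketch of what that enables (illustrative, not part of the diff):

	package main

	import (
		"encoding/json"
		"fmt"
		"log"

		"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	)

	func main() {
		payload := []byte(`{
			"primary_term": "1",
			"version": 3,
			"leases": [
				{"id": "peer_recovery/abc", "retaining_seq_no": 42, "timestamp": 1680000000000, "source": "peer recovery"}
			]
		}`)

		var r types.ShardRetentionLeases
		if err := json.Unmarshal(payload, &r); err != nil {
			log.Fatal(err)
		}
		// primary_term was sent as a string but still lands in the int64 field.
		fmt.Println(r.PrimaryTerm, r.Version, len(r.Leases), r.Leases[0].RetainingSeqNo) // 1 3 1 42
	}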
func NewShardRouting() *ShardRouting { r := &ShardRouting{} diff --git a/typedapi/types/shardsavailabilityindicator.go b/typedapi/types/shardsavailabilityindicator.go new file mode 100644 index 0000000000..ff8b976095 --- /dev/null +++ b/typedapi/types/shardsavailabilityindicator.go @@ -0,0 +1,43 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// ShardsAvailabilityIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L103-L107 +type ShardsAvailabilityIndicator struct { + Details *ShardsAvailabilityIndicatorDetails `json:"details,omitempty"` + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewShardsAvailabilityIndicator returns a ShardsAvailabilityIndicator. +func NewShardsAvailabilityIndicator() *ShardsAvailabilityIndicator { + r := &ShardsAvailabilityIndicator{} + + return r +} diff --git a/typedapi/types/shardsavailabilityindicatordetails.go b/typedapi/types/shardsavailabilityindicatordetails.go new file mode 100644 index 0000000000..1a7d33078e --- /dev/null +++ b/typedapi/types/shardsavailabilityindicatordetails.go @@ -0,0 +1,208 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// ShardsAvailabilityIndicatorDetails type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L108-L118 +type ShardsAvailabilityIndicatorDetails struct { + CreatingPrimaries int64 `json:"creating_primaries"` + InitializingPrimaries int64 `json:"initializing_primaries"` + InitializingReplicas int64 `json:"initializing_replicas"` + RestartingPrimaries int64 `json:"restarting_primaries"` + RestartingReplicas int64 `json:"restarting_replicas"` + StartedPrimaries int64 `json:"started_primaries"` + StartedReplicas int64 `json:"started_replicas"` + UnassignedPrimaries int64 `json:"unassigned_primaries"` + UnassignedReplicas int64 `json:"unassigned_replicas"` +} + +func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "creating_primaries": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.CreatingPrimaries = value + case float64: + f := int64(v) + s.CreatingPrimaries = f + } + + case "initializing_primaries": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InitializingPrimaries = value + case float64: + f := int64(v) + s.InitializingPrimaries = f + } + + case "initializing_replicas": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.InitializingReplicas = value + case float64: + f := int64(v) + s.InitializingReplicas = f + } + + case "restarting_primaries": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RestartingPrimaries = value + case float64: + f := int64(v) + s.RestartingPrimaries = f + } + + case "restarting_replicas": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RestartingReplicas = value + case float64: + f := int64(v) + s.RestartingReplicas = f + } + + case "started_primaries": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.StartedPrimaries = value + case float64: + f := int64(v) + s.StartedPrimaries = f + } + + case "started_replicas": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.StartedReplicas = value + case float64: + f := int64(v) + s.StartedReplicas = f + } + + case "unassigned_primaries": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UnassignedPrimaries = value + case float64: + f := int64(v) + s.UnassignedPrimaries = f + } + + case "unassigned_replicas": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UnassignedReplicas = value + case float64: + f := int64(v) + 
s.UnassignedReplicas = f + } + + } + } + return nil +} + +// NewShardsAvailabilityIndicatorDetails returns a ShardsAvailabilityIndicatorDetails. +func NewShardsAvailabilityIndicatorDetails() *ShardsAvailabilityIndicatorDetails { + r := &ShardsAvailabilityIndicatorDetails{} + + return r +} diff --git a/typedapi/types/shardsegmentrouting.go b/typedapi/types/shardsegmentrouting.go old mode 100755 new mode 100644 index c719b3b897..7c4a7b4fb6 --- a/typedapi/types/shardsegmentrouting.go +++ b/typedapi/types/shardsegmentrouting.go @@ -16,19 +16,79 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardSegmentRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/segments/types.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/segments/types.ts#L40-L44 type ShardSegmentRouting struct { Node string `json:"node"` Primary bool `json:"primary"` State string `json:"state"` } +func (s *ShardSegmentRouting) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "primary": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Primary = value + case bool: + s.Primary = v + } + + case "state": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = o + + } + } + return nil +} + // NewShardSegmentRouting returns a ShardSegmentRouting. func NewShardSegmentRouting() *ShardSegmentRouting { r := &ShardSegmentRouting{} diff --git a/typedapi/types/shardsequencenumber.go b/typedapi/types/shardsequencenumber.go old mode 100755 new mode 100644 index a1318e6d65..bc42171009 --- a/typedapi/types/shardsequencenumber.go +++ b/typedapi/types/shardsequencenumber.go @@ -16,19 +16,84 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardSequenceNumber type. 
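The two files above are new with the health-report API: ShardsAvailabilityIndicator carries the status, symptom, impacts, and diagnosis of the shards_availability indicator, and its details struct gets the same tolerant numeric decoding as the other types in this change. A small sketch (illustrative only, not part of the generated code):

	package main

	import (
		"encoding/json"
		"fmt"
		"log"

		"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	)

	func main() {
		// Counts may arrive as numbers or as strings; both decode into the int64 fields.
		payload := []byte(`{"started_primaries": 5, "started_replicas": "5", "unassigned_replicas": 0}`)

		var d types.ShardsAvailabilityIndicatorDetails
		if err := json.Unmarshal(payload, &d); err != nil {
			log.Fatal(err)
		}
		fmt.Println(d.StartedPrimaries, d.StartedReplicas, d.UnassignedReplicas) // 5 5 0
	}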
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L167-L171 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L167-L171 type ShardSequenceNumber struct { GlobalCheckpoint int64 `json:"global_checkpoint"` LocalCheckpoint int64 `json:"local_checkpoint"` MaxSeqNo int64 `json:"max_seq_no"` } +func (s *ShardSequenceNumber) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "global_checkpoint": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.GlobalCheckpoint = value + case float64: + f := int64(v) + s.GlobalCheckpoint = f + } + + case "local_checkpoint": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LocalCheckpoint = value + case float64: + f := int64(v) + s.LocalCheckpoint = f + } + + case "max_seq_no": + if err := dec.Decode(&s.MaxSeqNo); err != nil { + return err + } + + } + } + return nil +} + // NewShardSequenceNumber returns a ShardSequenceNumber. func NewShardSequenceNumber() *ShardSequenceNumber { r := &ShardSequenceNumber{} diff --git a/typedapi/types/shardsrecord.go b/typedapi/types/shardsrecord.go old mode 100755 new mode 100644 index d60d073408..821d5a01d8 --- a/typedapi/types/shardsrecord.go +++ b/typedapi/types/shardsrecord.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // ShardsRecord type. 
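The same decoding strategy repeats for every type in this diff, including the very large ShardsRecord decoder that follows: stream tokens with json.Decoder, switch on the key, and consume the value either with a targeted Decode or with a tolerant string/number branch. A condensed standalone version of the pattern (hypothetical docCount type, for orientation only; it also checks the inner Decode error, which the generated code omits):

	package main

	import (
		"bytes"
		"encoding/json"
		"errors"
		"fmt"
		"io"
		"log"
		"strconv"
	)

	// docCount is a hypothetical stand-in for the generated types in this diff.
	type docCount struct {
		Count int64 `json:"count"`
	}

	// UnmarshalJSON mirrors the generated pattern: walk the tokens, switch on
	// the key, and accept the value as either a JSON number or a quoted string.
	func (d *docCount) UnmarshalJSON(data []byte) error {
		dec := json.NewDecoder(bytes.NewReader(data))
		for {
			t, err := dec.Token()
			if err != nil {
				if errors.Is(err, io.EOF) {
					break
				}
				return err
			}
			switch t {
			case "count":
				var tmp interface{}
				if err := dec.Decode(&tmp); err != nil {
					return err
				}
				switch v := tmp.(type) {
				case string:
					n, err := strconv.ParseInt(v, 10, 64)
					if err != nil {
						return err
					}
					d.Count = n
				case float64:
					d.Count = int64(v)
				}
			}
		}
		return nil
	}

	func main() {
		var a, b docCount
		if err := json.Unmarshal([]byte(`{"count": 7}`), &a); err != nil {
			log.Fatal(err)
		}
		if err := json.Unmarshal([]byte(`{"count": "7"}`), &b); err != nil {
			log.Fatal(err)
		}
		fmt.Println(a.Count, b.Count) // 7 7
	}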
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/shards/types.ts#L20-L396 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/shards/types.ts#L20-L396 type ShardsRecord struct { // BulkAvgSizeInBytes avg size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` @@ -179,6 +186,634 @@ type ShardsRecord struct { WarmerTotalTime *string `json:"warmer.total_time,omitempty"` } +func (s *ShardsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bulk.avg_size_in_bytes", "basi", "bulkAvgSizeInBytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkAvgSizeInBytes = &o + + case "bulk.avg_time", "bati", "bulkAvgTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkAvgTime = &o + + case "bulk.total_operations", "bto", "bulkTotalOperations": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalOperations = &o + + case "bulk.total_size_in_bytes", "btsi", "bulkTotalSizeInBytes": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalSizeInBytes = &o + + case "bulk.total_time", "btti", "bulkTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.BulkTotalTime = &o + + case "completion.size", "cs", "completionSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CompletionSize = &o + + case "docs", "d", "dc": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Docs = o + + case "fielddata.evictions", "fe", "fielddataEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FielddataEvictions = &o + + case "fielddata.memory_size", "fm", "fielddataMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FielddataMemorySize = &o + + case "flush.total", "ft", "flushTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FlushTotal = &o + + case "flush.total_time", "ftt", "flushTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FlushTotalTime = &o + + case "get.current", "gc", "getCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetCurrent = &o + + case "get.exists_time", "geti", "getExistsTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetExistsTime = &o + + case "get.exists_total", "geto", "getExistsTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetExistsTotal = &o + + case "get.missing_time", "gmti", "getMissingTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetMissingTime = &o + + case 
"get.missing_total", "gmto", "getMissingTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetMissingTotal = &o + + case "get.time", "gti", "getTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetTime = &o + + case "get.total", "gto", "getTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.GetTotal = &o + + case "id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Id = &o + + case "index", "i", "idx": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Index = &o + + case "indexing.delete_current", "idc", "indexingDeleteCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteCurrent = &o + + case "indexing.delete_time", "idti", "indexingDeleteTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteTime = &o + + case "indexing.delete_total", "idto", "indexingDeleteTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingDeleteTotal = &o + + case "indexing.index_current", "iic", "indexingIndexCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexCurrent = &o + + case "indexing.index_failed", "iif", "indexingIndexFailed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexFailed = &o + + case "indexing.index_time", "iiti", "indexingIndexTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexTime = &o + + case "indexing.index_total", "iito", "indexingIndexTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexingIndexTotal = &o + + case "ip": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Ip = o + + case "merges.current", "mc", "mergesCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrent = &o + + case "merges.current_docs", "mcd", "mergesCurrentDocs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrentDocs = &o + + case "merges.current_size", "mcs", "mergesCurrentSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesCurrentSize = &o + + case "merges.total", "mt", "mergesTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotal = &o + + case "merges.total_docs", "mtd", "mergesTotalDocs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalDocs = &o + + case "merges.total_size", "mts", "mergesTotalSize": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MergesTotalSize = &o + + case "merges.total_time", "mtt", "mergesTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o 
:= string(tmp) + s.MergesTotalTime = &o + + case "node", "n": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "path.data", "pd", "dataPath": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PathData = &o + + case "path.state", "ps", "statsPath": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PathState = &o + + case "prirep", "p", "pr", "primaryOrReplica": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Prirep = &o + + case "query_cache.evictions", "qce", "queryCacheEvictions": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheEvictions = &o + + case "query_cache.memory_size", "qcm", "queryCacheMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryCacheMemorySize = &o + + case "recoverysource.type", "rs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RecoverysourceType = &o + + case "refresh.external_time", "rti", "refreshTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshExternalTime = &o + + case "refresh.external_total", "rto", "refreshTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshExternalTotal = &o + + case "refresh.listeners", "rli", "refreshListeners": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshListeners = &o + + case "refresh.time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshTime = &o + + case "refresh.total": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RefreshTotal = &o + + case "search.fetch_current", "sfc", "searchFetchCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchCurrent = &o + + case "search.fetch_time", "sfti", "searchFetchTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchTime = &o + + case "search.fetch_total", "sfto", "searchFetchTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFetchTotal = &o + + case "search.open_contexts", "so", "searchOpenContexts": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchOpenContexts = &o + + case "search.query_current", "sqc", "searchQueryCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryCurrent = &o + + case "search.query_time", "sqti", "searchQueryTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryTime = &o + + case "search.query_total", "sqto", "searchQueryTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchQueryTotal = &o + + case "search.scroll_current", "scc", "searchScrollCurrent": + var tmp json.RawMessage + if 
err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollCurrent = &o + + case "search.scroll_time", "scti", "searchScrollTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollTime = &o + + case "search.scroll_total", "scto", "searchScrollTotal": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchScrollTotal = &o + + case "segments.count", "sc", "segmentsCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsCount = &o + + case "segments.fixed_bitset_memory", "sfbm", "fixedBitsetMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsFixedBitsetMemory = &o + + case "segments.index_writer_memory", "siwm", "segmentsIndexWriterMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsIndexWriterMemory = &o + + case "segments.memory", "sm", "segmentsMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsMemory = &o + + case "segments.version_map_memory", "svmm", "segmentsVersionMapMemory": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SegmentsVersionMapMemory = &o + + case "seq_no.global_checkpoint", "sqg", "globalCheckpoint": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SeqNoGlobalCheckpoint = &o + + case "seq_no.local_checkpoint", "sql", "localCheckpoint": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SeqNoLocalCheckpoint = &o + + case "seq_no.max", "sqm", "maxSeqNo": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SeqNoMax = &o + + case "shard", "s", "sh": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Shard = &o + + case "state", "st": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = &o + + case "store", "sto": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Store = o + + case "sync_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SyncId = &o + + case "unassigned.at", "ua": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UnassignedAt = &o + + case "unassigned.details", "ud": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UnassignedDetails = &o + + case "unassigned.for", "uf": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UnassignedFor = &o + + case "unassigned.reason", "ur": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UnassignedReason = &o + + case "warmer.current", "wc", "warmerCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WarmerCurrent = &o + + case "warmer.total", "wto", "warmerTotal": + var tmp json.RawMessage + if err := 
dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WarmerTotal = &o + + case "warmer.total_time", "wtt", "warmerTotalTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.WarmerTotalTime = &o + + } + } + return nil +} + // NewShardsRecord returns a ShardsRecord. func NewShardsRecord() *ShardsRecord { r := &ShardsRecord{} diff --git a/typedapi/types/shardssegment.go b/typedapi/types/shardssegment.go old mode 100755 new mode 100644 index 568076270a..e6340280e1 --- a/typedapi/types/shardssegment.go +++ b/typedapi/types/shardssegment.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardsSegment type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/segments/types.ts#L46-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/segments/types.ts#L46-L51 type ShardsSegment struct { NumCommittedSegments int `json:"num_committed_segments"` NumSearchSegments int `json:"num_search_segments"` @@ -30,6 +40,71 @@ type ShardsSegment struct { Segments map[string]Segment `json:"segments"` } +func (s *ShardsSegment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_committed_segments": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumCommittedSegments = value + case float64: + f := int(v) + s.NumCommittedSegments = f + } + + case "num_search_segments": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumSearchSegments = value + case float64: + f := int(v) + s.NumSearchSegments = f + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "segments": + if s.Segments == nil { + s.Segments = make(map[string]Segment, 0) + } + if err := dec.Decode(&s.Segments); err != nil { + return err + } + + } + } + return nil +} + // NewShardsSegment returns a ShardsSegment. func NewShardsSegment() *ShardsSegment { r := &ShardsSegment{ diff --git a/typedapi/types/shardsstatssummary.go b/typedapi/types/shardsstatssummary.go old mode 100755 new mode 100644 index 0ee0b0a315..2dc8f7f6b5 --- a/typedapi/types/shardsstatssummary.go +++ b/typedapi/types/shardsstatssummary.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ShardsStatsSummary type. 
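One thing the very long switch in ShardsRecord's decoder (ending above) buys: each cat-shards column is matched by its full name, its short alias, and its camelCase header, so the same struct field is populated no matter which column names the server returned. Illustrative sketch, not part of the diff:

	package main

	import (
		"encoding/json"
		"fmt"
		"log"

		"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	)

	func main() {
		long := []byte(`{"index": "logs", "shard": "0", "prirep": "p", "state": "STARTED"}`)
		short := []byte(`{"i": "logs", "s": "0", "pr": "p", "st": "STARTED"}`)

		var a, b types.ShardsRecord
		if err := json.Unmarshal(long, &a); err != nil {
			log.Fatal(err)
		}
		if err := json.Unmarshal(short, &b); err != nil {
			log.Fatal(err)
		}
		// Full names and aliases land in the same fields.
		fmt.Println(*a.Index == *b.Index, *a.Shard == *b.Shard, *a.Prirep == *b.Prirep, *a.State == *b.State)
		// true true true true
	}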
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotShardsStatus.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotShardsStatus.ts#L29-L35 type ShardsStatsSummary struct { Incremental ShardsStatsSummaryItem `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` @@ -31,6 +39,51 @@ type ShardsStatsSummary struct { Total ShardsStatsSummaryItem `json:"total"` } +func (s *ShardsStatsSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "incremental": + if err := dec.Decode(&s.Incremental); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "time": + if err := dec.Decode(&s.Time); err != nil { + return err + } + + case "time_in_millis": + if err := dec.Decode(&s.TimeInMillis); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + } + } + return nil +} + // NewShardsStatsSummary returns a ShardsStatsSummary. func NewShardsStatsSummary() *ShardsStatsSummary { r := &ShardsStatsSummary{} diff --git a/typedapi/types/shardsstatssummaryitem.go b/typedapi/types/shardsstatssummaryitem.go old mode 100755 new mode 100644 index e9d498ea0c..62b83500e9 --- a/typedapi/types/shardsstatssummaryitem.go +++ b/typedapi/types/shardsstatssummaryitem.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardsStatsSummaryItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotShardsStatus.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotShardsStatus.ts#L37-L40 type ShardsStatsSummaryItem struct { FileCount int64 `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` } +func (s *ShardsStatsSummaryItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "file_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.FileCount = value + case float64: + f := int64(v) + s.FileCount = f + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + } + } + return nil +} + // NewShardsStatsSummaryItem returns a ShardsStatsSummaryItem. 
func NewShardsStatsSummaryItem() *ShardsStatsSummaryItem { r := &ShardsStatsSummaryItem{} diff --git a/typedapi/types/shardstatistics.go b/typedapi/types/shardstatistics.go old mode 100755 new mode 100644 index 9fd66d568d..bd71e84d71 --- a/typedapi/types/shardstatistics.go +++ b/typedapi/types/shardstatistics.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ShardStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L33-L39 type ShardStatistics struct { Failed uint `json:"failed"` Failures []ShardFailure `json:"failures,omitempty"` diff --git a/typedapi/types/shardstore.go b/typedapi/types/shardstore.go old mode 100755 new mode 100644 index 7f5df755e5..c422190bdb --- a/typedapi/types/shardstore.go +++ b/typedapi/types/shardstore.go @@ -16,27 +16,77 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/shardstoreallocation" - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // ShardStore type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L30-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L30-L34 type ShardStore struct { Allocation shardstoreallocation.ShardStoreAllocation `json:"allocation"` AllocationId *string `json:"allocation_id,omitempty"` - ShardStore map[string]ShardStoreNode `json:"-"` + ShardStore map[string]ShardStoreNode `json:"ShardStore,omitempty"` StoreException *ShardStoreException `json:"store_exception,omitempty"` } +func (s *ShardStore) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocation": + if err := dec.Decode(&s.Allocation); err != nil { + return err + } + + case "allocation_id": + if err := dec.Decode(&s.AllocationId); err != nil { + return err + } + + case "ShardStore": + if s.ShardStore == nil { + s.ShardStore = make(map[string]ShardStoreNode, 0) + } + if err := dec.Decode(&s.ShardStore); err != nil { + return err + } + + case "store_exception": + if err := dec.Decode(&s.StoreException); err != nil { + return err + } + + default: + + } + } + return nil +} + // MarhsalJSON overrides marshalling for types with additional properties func (s ShardStore) MarshalJSON() ([]byte, error) { type opt ShardStore @@ -56,6 +106,7 @@ func (s ShardStore) MarshalJSON() ([]byte, error) { for key, value := range s.ShardStore { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "ShardStore") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/shardstoreexception.go b/typedapi/types/shardstoreexception.go old mode 100755 new mode 100644 index def1249812..58920278c8 --- a/typedapi/types/shardstoreexception.go +++ b/typedapi/types/shardstoreexception.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ShardStoreException type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L51-L54 type ShardStoreException struct { Reason string `json:"reason"` Type string `json:"type"` diff --git a/typedapi/types/shardstoreindex.go b/typedapi/types/shardstoreindex.go old mode 100755 new mode 100644 index 06da9012c1..5643526964 --- a/typedapi/types/shardstoreindex.go +++ b/typedapi/types/shardstoreindex.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ShardStoreIndex type. 
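The ShardStore change above also touches serialization: the per-node entries held in the ShardStore map are written back as top-level keys (keyed by node id), and the placeholder "ShardStore" key is now removed from the marshalled output. A rough sketch of that flattening (illustrative only; field values kept minimal, and it assumes a zero-value allocation marshals without error):

	package main

	import (
		"encoding/json"
		"fmt"
		"log"

		"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	)

	func main() {
		s := types.ShardStore{
			ShardStore: map[string]types.ShardStoreNode{
				"node-a": {Name: "node-a", TransportAddress: "127.0.0.1:9300"},
			},
		}

		out, err := json.Marshal(s)
		if err != nil {
			log.Fatal(err)
		}
		// The node entry appears as a top-level "node-a" key rather than under
		// a nested "ShardStore" object.
		fmt.Println(string(out))
	}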
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search_shards/SearchShardsResponse.ts#L33-L36 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search_shards/SearchShardsResponse.ts#L33-L36 type ShardStoreIndex struct { Aliases []string `json:"aliases,omitempty"` Filter *Query `json:"filter,omitempty"` } +func (s *ShardStoreIndex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + } + } + return nil +} + // NewShardStoreIndex returns a ShardStoreIndex. func NewShardStoreIndex() *ShardStoreIndex { r := &ShardStoreIndex{} diff --git a/typedapi/types/shardstorenode.go b/typedapi/types/shardstorenode.go old mode 100755 new mode 100644 index 048236b8e7..94833fb290 --- a/typedapi/types/shardstorenode.go +++ b/typedapi/types/shardstorenode.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ShardStoreNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L36-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L36-L43 type ShardStoreNode struct { Attributes map[string]string `json:"attributes"` EphemeralId *string `json:"ephemeral_id,omitempty"` @@ -32,6 +40,65 @@ type ShardStoreNode struct { TransportAddress string `json:"transport_address"` } +func (s *ShardStoreNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "ephemeral_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.EphemeralId = &o + + case "external_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ExternalId = &o + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewShardStoreNode returns a ShardStoreNode. 
func NewShardStoreNode() *ShardStoreNode { r := &ShardStoreNode{ diff --git a/typedapi/types/shardstorewrapper.go b/typedapi/types/shardstorewrapper.go old mode 100755 new mode 100644 index 0f47703b65..4d451a04b4 --- a/typedapi/types/shardstorewrapper.go +++ b/typedapi/types/shardstorewrapper.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ShardStoreWrapper type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/shard_stores/types.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/shard_stores/types.ts#L56-L58 type ShardStoreWrapper struct { Stores []ShardStore `json:"stores"` } diff --git a/typedapi/types/shardstotalstats.go b/typedapi/types/shardstotalstats.go old mode 100755 new mode 100644 index 725982bad7..cb0ded6661 --- a/typedapi/types/shardstotalstats.go +++ b/typedapi/types/shardstotalstats.go @@ -16,17 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShardsTotalStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/stats/types.ts#L173-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/stats/types.ts#L173-L175 type ShardsTotalStats struct { TotalCount int64 `json:"total_count"` } +func (s *ShardsTotalStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "total_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalCount = value + case float64: + f := int64(v) + s.TotalCount = f + } + + } + } + return nil +} + // NewShardsTotalStats returns a ShardsTotalStats. func NewShardsTotalStats() *ShardsTotalStats { r := &ShardsTotalStats{} diff --git a/typedapi/types/shared.go b/typedapi/types/shared.go old mode 100755 new mode 100644 index 67c7f6f36b..e173f066e3 --- a/typedapi/types/shared.go +++ b/typedapi/types/shared.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Shared type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/searchable_snapshots/cache_stats/Response.ts#L34-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/searchable_snapshots/cache_stats/Response.ts#L34-L43 type Shared struct { BytesReadInBytes ByteSize `json:"bytes_read_in_bytes"` BytesWrittenInBytes ByteSize `json:"bytes_written_in_bytes"` @@ -34,6 +44,107 @@ type Shared struct { Writes int64 `json:"writes"` } +func (s *Shared) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bytes_read_in_bytes": + if err := dec.Decode(&s.BytesReadInBytes); err != nil { + return err + } + + case "bytes_written_in_bytes": + if err := dec.Decode(&s.BytesWrittenInBytes); err != nil { + return err + } + + case "evictions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Evictions = value + case float64: + f := int64(v) + s.Evictions = f + } + + case "num_regions": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumRegions = value + case float64: + f := int(v) + s.NumRegions = f + } + + case "reads": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Reads = value + case float64: + f := int64(v) + s.Reads = f + } + + case "region_size_in_bytes": + if err := dec.Decode(&s.RegionSizeInBytes); err != nil { + return err + } + + case "size_in_bytes": + if err := dec.Decode(&s.SizeInBytes); err != nil { + return err + } + + case "writes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Writes = value + case float64: + f := int64(v) + s.Writes = f + } + + } + } + return nil +} + // NewShared returns a Shared. func NewShared() *Shared { r := &Shared{} diff --git a/typedapi/types/shingletokenfilter.go b/typedapi/types/shingletokenfilter.go old mode 100755 new mode 100644 index 158f2fd07f..0f678387ac --- a/typedapi/types/shingletokenfilter.go +++ b/typedapi/types/shingletokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShingleTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L86-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L87-L95 type ShingleTokenFilter struct { FillerToken *string `json:"filler_token,omitempty"` MaxShingleSize string `json:"max_shingle_size,omitempty"` @@ -34,6 +44,96 @@ type ShingleTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filler_token": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FillerToken = &o + + case "max_shingle_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxShingleSize = o + + case "min_shingle_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinShingleSize = o + + case "output_unigrams": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OutputUnigrams = &value + case bool: + s.OutputUnigrams = &v + } + + case "output_unigrams_if_no_shingles": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OutputUnigramsIfNoShingles = &value + case bool: + s.OutputUnigramsIfNoShingles = &v + } + + case "token_separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TokenSeparator = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewShingleTokenFilter returns a ShingleTokenFilter. func NewShingleTokenFilter() *ShingleTokenFilter { r := &ShingleTokenFilter{} diff --git a/typedapi/types/shortnumberproperty.go b/typedapi/types/shortnumberproperty.go old mode 100755 new mode 100644 index 2444b5cdcf..03e4fcdcd5 --- a/typedapi/types/shortnumberproperty.go +++ b/typedapi/types/shortnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // ShortNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L156-L159 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L156-L159 type ShortNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type ShortNumberProperty struct { } func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,28 +435,60 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := 
dec.Decode(&s.Meta); err != nil { return err } @@ -426,6 +504,9 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +794,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +806,39 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/shrinkconfiguration.go b/typedapi/types/shrinkconfiguration.go old mode 100755 new mode 100644 index fc5fb05311..212fa61542 --- a/typedapi/types/shrinkconfiguration.go +++ b/typedapi/types/shrinkconfiguration.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ShrinkConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/_types/Phase.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/_types/Phase.ts#L57-L59 type ShrinkConfiguration struct { NumberOfShards int `json:"number_of_shards"` } +func (s *ShrinkConfiguration) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "number_of_shards": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfShards = value + case float64: + f := int(v) + s.NumberOfShards = f + } + + } + } + return nil +} + // NewShrinkConfiguration returns a ShrinkConfiguration. func NewShrinkConfiguration() *ShrinkConfiguration { r := &ShrinkConfiguration{} diff --git a/typedapi/types/significantlongtermsaggregate.go b/typedapi/types/significantlongtermsaggregate.go old mode 100755 new mode 100644 index 2423569ffa..561c69fecd --- a/typedapi/types/significantlongtermsaggregate.go +++ b/typedapi/types/significantlongtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SignificantLongTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L587-L589 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L588-L590 type SignificantLongTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -53,8 +56,18 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BgCount = &value + case float64: + f := int64(v) + s.BgCount = &f } case "buckets": @@ -64,21 +77,33 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]SignificantLongTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []SignificantLongTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = &value + case float64: + f := int64(v) + s.DocCount = &f } case "meta": diff --git a/typedapi/types/significantlongtermsbucket.go b/typedapi/types/significantlongtermsbucket.go old mode 100755 new mode 100644 index 63a835ffe6..4e5f994ff8 --- a/typedapi/types/significantlongtermsbucket.go +++ b/typedapi/types/significantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // SignificantLongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L596-L599 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L597-L600 type SignificantLongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` @@ -45,6 +47,7 @@ type SignificantLongTermsBucket struct { } func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -58,471 +61,586 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "bg_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() 
- if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := 
NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": 
- o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + s.BgCount = value + case float64: + f := int64(v) + s.BgCount = f } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Key = value + case float64: + f := int64(v) + s.Key = f } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o case "score": - if err := dec.Decode(&s.Score); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score = f + case float64: + f := Float64(v) + s.Score = f + } + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case 
"sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := 
NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } } } @@ -549,6 +667,7 @@ func (s SignificantLongTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/significantstringtermsaggregate.go b/typedapi/types/significantstringtermsaggregate.go old mode 100755 new mode 100644 index 24d9f0a3e6..1a752f6313 --- a/typedapi/types/significantstringtermsaggregate.go +++ b/typedapi/types/significantstringtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SignificantStringTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L601-L603 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L602-L604 type SignificantStringTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -53,8 +56,18 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BgCount = &value + case float64: + f := int64(v) + s.BgCount = &f } case "buckets": @@ -64,21 +77,33 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]SignificantStringTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []SignificantStringTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = &value + case float64: + f := int64(v) + s.DocCount = &f } case "meta": diff --git a/typedapi/types/significantstringtermsbucket.go b/typedapi/types/significantstringtermsbucket.go old mode 100755 new mode 100644 index 541cafc8ad..035dc1cad5 --- a/typedapi/types/significantstringtermsbucket.go +++ b/typedapi/types/significantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // SignificantStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L605-L607 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L606-L608 type SignificantStringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` @@ -44,6 +46,7 @@ type SignificantStringTermsBucket struct { } func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,466 +60,571 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "bg_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := 
NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case 
"sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - 
case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + s.BgCount = value + case float64: + f := int64(v) + s.BgCount = f } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": - if err := dec.Decode(&s.Key); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Key = o case "score": - if err := dec.Decode(&s.Score); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score = f + case float64: + f := Float64(v) + s.Score = f + } + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := 
NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := 
NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + 
return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } } } @@ -543,6 +651,7 @@ func (s SignificantStringTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go b/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go old mode 100755 new mode 100644 index d274cf3694..e3ffd5b1ba --- a/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go +++ b/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SignificantTermsAggregateBaseSignificantLongTermsBucket type. 
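Reviewer note: the `elems := strings.Split(value, "#")` dispatch above handles the `type#name` keys Elasticsearch emits when typed_keys is enabled; the prefix selects the concrete Aggregate implementation and the suffix is the user-chosen aggregation name. A standalone sketch of just that dispatch, with an illustrative payload (the JSON body and variable names below are assumptions, not repository fixtures):

    package main

    import (
        "encoding/json"
        "fmt"
        "strings"
    )

    func main() {
        // "sterms#tags" -> concrete type "sterms", user-chosen name "tags".
        raw := []byte(`{"sterms#tags":{"buckets":[{"key":"go","doc_count":2}]}}`)
        var aggs map[string]json.RawMessage
        if err := json.Unmarshal(raw, &aggs); err != nil {
            panic(err)
        }
        for key, payload := range aggs {
            elems := strings.Split(key, "#")
            if len(elems) != 2 {
                continue // untyped key, handled by the fallback branch above
            }
            fmt.Printf("type=%s name=%s payload=%s\n", elems[0], elems[1], payload)
        }
    }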
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L580-L585 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L581-L586 type SignificantTermsAggregateBaseSignificantLongTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -53,8 +56,18 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( switch t { case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BgCount = &value + case float64: + f := int64(v) + s.BgCount = &f } case "buckets": @@ -64,21 +77,33 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]SignificantLongTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []SignificantLongTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = &value + case float64: + f := int64(v) + s.DocCount = &f } case "meta": diff --git a/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go b/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go old mode 100755 new mode 100644 index bffe157a28..8e5d557bdf --- a/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go +++ b/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SignificantTermsAggregateBaseSignificantStringTermsBucket type. 
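Reviewer note: the bg_count and doc_count branches above swap a direct decode for a tolerant one because the wire value may arrive either as a JSON number or as a quoted string. The same conversion in isolation (decodeInt64 is my name, not part of the generated package):

    package main

    import (
        "encoding/json"
        "fmt"
        "strconv"
    )

    // decodeInt64 accepts either a JSON number or a numeric string.
    func decodeInt64(data []byte) (int64, error) {
        var tmp interface{}
        if err := json.Unmarshal(data, &tmp); err != nil {
            return 0, err
        }
        switch v := tmp.(type) {
        case string:
            return strconv.ParseInt(v, 10, 64)
        case float64:
            return int64(v), nil
        default:
            return 0, fmt.Errorf("unexpected JSON type %T", tmp)
        }
    }

    func main() {
        a, _ := decodeInt64([]byte(`42`))
        b, _ := decodeInt64([]byte(`"42"`))
        fmt.Println(a, b) // 42 42
    }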
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L580-L585 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L581-L586 type SignificantTermsAggregateBaseSignificantStringTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -53,8 +56,18 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO switch t { case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BgCount = &value + case float64: + f := int64(v) + s.BgCount = &f } case "buckets": @@ -64,21 +77,33 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]SignificantStringTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []SignificantStringTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = &value + case float64: + f := int64(v) + s.DocCount = &f } case "meta": diff --git a/typedapi/types/significanttermsaggregatebasevoid.go b/typedapi/types/significanttermsaggregatebasevoid.go old mode 100755 new mode 100644 index 2c2e0aa5bc..ad538d0219 --- a/typedapi/types/significanttermsaggregatebasevoid.go +++ b/typedapi/types/significanttermsaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SignificantTermsAggregateBaseVoid type. 
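Reviewer note: the buckets branch now inspects the first byte of the raw message, because keyed significant-terms responses serialize buckets as a JSON object while unkeyed ones use an array. A self-contained sketch of that shape test (the bucket struct here is a stand-in for the generated SignificantStringTermsBucket):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
    )

    type bucket struct {
        DocCount int64 `json:"doc_count"`
    }

    // decodeBuckets returns either map[string]bucket or []bucket, mirroring the
    // '{' / '[' switch in the generated code.
    func decodeBuckets(raw json.RawMessage) (interface{}, error) {
        switch bytes.TrimSpace(raw)[0] {
        case '{':
            o := make(map[string]bucket)
            err := json.Unmarshal(raw, &o)
            return o, err
        case '[':
            var o []bucket
            err := json.Unmarshal(raw, &o)
            return o, err
        }
        return nil, fmt.Errorf("unexpected buckets shape")
    }

    func main() {
        keyed, _ := decodeBuckets([]byte(`{"errors":{"doc_count":7}}`))
        plain, _ := decodeBuckets([]byte(`[{"doc_count":7}]`))
        fmt.Println(keyed, plain)
    }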
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L580-L585 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L581-L586 type SignificantTermsAggregateBaseVoid struct { - BgCount *int64 `json:"bg_count,omitempty"` - Buckets BucketsVoid `json:"buckets"` - DocCount *int64 `json:"doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + BgCount *int64 `json:"bg_count,omitempty"` + Buckets BucketsVoid `json:"buckets"` + DocCount *int64 `json:"doc_count,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -53,8 +56,18 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BgCount = &value + case float64: + f := int64(v) + s.BgCount = &f } case "buckets": @@ -64,21 +77,33 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': - o := make(map[string]struct{}, 0) - localDec.Decode(&o) + o := make(map[string]interface{}, 0) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': - o := []struct{}{} - localDec.Decode(&o) + o := []interface{}{} + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = &value + case float64: + f := int64(v) + s.DocCount = &f } case "meta": diff --git a/typedapi/types/significanttermsaggregation.go b/typedapi/types/significanttermsaggregation.go old mode 100755 new mode 100644 index 2b87de292f..cb0fcef915 --- a/typedapi/types/significanttermsaggregation.go +++ b/typedapi/types/significanttermsaggregation.go @@ -16,19 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/termsaggregationexecutionhint" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SignificantTermsAggregation type. 
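Reviewer note: Meta moves from map[string]json.RawMessage to the shared Metadata alias throughout this diff. A hypothetical construction of a significant_terms aggregation that attaches metadata; it assumes Metadata keeps the string-keyed raw-JSON map shape of the field it replaces, which is not shown in this hunk:

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        agg := types.NewSignificantTermsAggregation()
        size := 10
        agg.Size = &size
        minDoc := int64(5)
        agg.MinDocCount = &minDoc
        // Assumption: Metadata is still a string-keyed map of raw JSON, like the
        // map[string]json.RawMessage field it replaces in this diff.
        agg.Meta = types.Metadata{"owner": json.RawMessage(`"search-team"`)}
        out, _ := json.Marshal(agg)
        fmt.Println(string(out))
    }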
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L342-L358 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L342-L358 type SignificantTermsAggregation struct { BackgroundFilter *Query `json:"background_filter,omitempty"` ChiSquare *ChiSquareHeuristic `json:"chi_square,omitempty"` @@ -38,7 +44,7 @@ type SignificantTermsAggregation struct { Gnd *GoogleNormalizedDistanceHeuristic `json:"gnd,omitempty"` Include TermsInclude `json:"include,omitempty"` Jlh *EmptyObject `json:"jlh,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` MutualInformation *MutualInformationHeuristic `json:"mutual_information,omitempty"` Name *string `json:"name,omitempty"` @@ -49,6 +55,167 @@ type SignificantTermsAggregation struct { Size *int `json:"size,omitempty"` } +func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "background_filter": + if err := dec.Decode(&s.BackgroundFilter); err != nil { + return err + } + + case "chi_square": + if err := dec.Decode(&s.ChiSquare); err != nil { + return err + } + + case "exclude": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Exclude = append(s.Exclude, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { + return err + } + } + + case "execution_hint": + if err := dec.Decode(&s.ExecutionHint); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "gnd": + if err := dec.Decode(&s.Gnd); err != nil { + return err + } + + case "include": + if err := dec.Decode(&s.Include); err != nil { + return err + } + + case "jlh": + if err := dec.Decode(&s.Jlh); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int64(v) + s.MinDocCount = &f + } + + case "mutual_information": + if err := dec.Decode(&s.MutualInformation); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "percentage": + if err := dec.Decode(&s.Percentage); err != nil { + return err + } + + case "script_heuristic": + if err := dec.Decode(&s.ScriptHeuristic); err != nil { + return err + } + + case "shard_min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShardMinDocCount = &value + case float64: + f := int64(v) + s.ShardMinDocCount = &f + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + 
return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil +} + // NewSignificantTermsAggregation returns a SignificantTermsAggregation. func NewSignificantTermsAggregation() *SignificantTermsAggregation { r := &SignificantTermsAggregation{} diff --git a/typedapi/types/significanttextaggregation.go b/typedapi/types/significanttextaggregation.go old mode 100755 new mode 100644 index 98519cea21..0d5c964b2e --- a/typedapi/types/significanttextaggregation.go +++ b/typedapi/types/significanttextaggregation.go @@ -16,19 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/termsaggregationexecutionhint" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SignificantTextAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L360-L378 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L360-L378 type SignificantTextAggregation struct { BackgroundFilter *Query `json:"background_filter,omitempty"` ChiSquare *ChiSquareHeuristic `json:"chi_square,omitempty"` @@ -39,7 +45,7 @@ type SignificantTextAggregation struct { Gnd *GoogleNormalizedDistanceHeuristic `json:"gnd,omitempty"` Include []string `json:"include,omitempty"` Jlh *EmptyObject `json:"jlh,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` MutualInformation *MutualInformationHeuristic `json:"mutual_information,omitempty"` Name *string `json:"name,omitempty"` @@ -51,6 +57,208 @@ type SignificantTextAggregation struct { SourceFields []string `json:"source_fields,omitempty"` } +func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "background_filter": + if err := dec.Decode(&s.BackgroundFilter); err != nil { + return err + } + + case "chi_square": + if err := dec.Decode(&s.ChiSquare); err != nil { + return err + } + + case "exclude": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Exclude = append(s.Exclude, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { + return err + } + } + + case "execution_hint": + if err := dec.Decode(&s.ExecutionHint); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "filter_duplicate_text": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + 
case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FilterDuplicateText = &value + case bool: + s.FilterDuplicateText = &v + } + + case "gnd": + if err := dec.Decode(&s.Gnd); err != nil { + return err + } + + case "include": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Include = append(s.Include, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Include); err != nil { + return err + } + } + + case "jlh": + if err := dec.Decode(&s.Jlh); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int64(v) + s.MinDocCount = &f + } + + case "mutual_information": + if err := dec.Decode(&s.MutualInformation); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "percentage": + if err := dec.Decode(&s.Percentage); err != nil { + return err + } + + case "script_heuristic": + if err := dec.Decode(&s.ScriptHeuristic); err != nil { + return err + } + + case "shard_min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShardMinDocCount = &value + case float64: + f := int64(v) + s.ShardMinDocCount = &f + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "source_fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.SourceFields = append(s.SourceFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.SourceFields); err != nil { + return err + } + } + + } + } + return nil +} + // NewSignificantTextAggregation returns a SignificantTextAggregation. func NewSignificantTextAggregation() *SignificantTextAggregation { r := &SignificantTextAggregation{} diff --git a/typedapi/types/simpleanalyzer.go b/typedapi/types/simpleanalyzer.go old mode 100755 new mode 100644 index 87051b4975..2e4e173cb8 --- a/typedapi/types/simpleanalyzer.go +++ b/typedapi/types/simpleanalyzer.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SimpleAnalyzer type. 
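Reviewer note: the exclude, include and source_fields branches above all promote a single JSON string to a one-element slice via the bytes.HasPrefix check. The same logic in isolation (decodeStringOrSlice is my name, not part of the generated package):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
    )

    // decodeStringOrSlice accepts either a JSON string or an array of strings.
    func decodeStringOrSlice(raw json.RawMessage) ([]string, error) {
        if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
            var s string
            if err := json.Unmarshal(raw, &s); err != nil {
                return nil, err
            }
            return []string{s}, nil
        }
        var out []string
        if err := json.Unmarshal(raw, &out); err != nil {
            return nil, err
        }
        return out, nil
    }

    func main() {
        one, _ := decodeStringOrSlice([]byte(`"water_"`))
        many, _ := decodeStringOrSlice([]byte(`["water_","fire_"]`))
        fmt.Println(one, many)
    }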
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L83-L86 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L83-L86 type SimpleAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *SimpleAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSimpleAnalyzer returns a SimpleAnalyzer. func NewSimpleAnalyzer() *SimpleAnalyzer { r := &SimpleAnalyzer{} diff --git a/typedapi/types/simplemovingaverageaggregation.go b/typedapi/types/simplemovingaverageaggregation.go old mode 100755 new mode 100644 index bbe13850a4..375224f4bb --- a/typedapi/types/simplemovingaverageaggregation.go +++ b/typedapi/types/simplemovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,27 +27,30 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SimpleMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L207-L210 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L207-L210 type SimpleMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Minimize *bool `json:"minimize,omitempty"` - Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` - Predict *int `json:"predict,omitempty"` - Settings EmptyObject `json:"settings"` - Window *int `json:"window,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Minimize *bool `json:"minimize,omitempty"` + Model string `json:"model,omitempty"` + Name *string `json:"name,omitempty"` + Predict *int `json:"predict,omitempty"` + Settings EmptyObject `json:"settings"` + Window *int `json:"window,omitempty"` } func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,9 +70,12 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -82,8 +88,17 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "minimize": - if err := dec.Decode(&s.Minimize); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Minimize = &value + case bool: + s.Minimize = &v } case "model": @@ -92,13 +107,27 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "predict": - if err := dec.Decode(&s.Predict); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Predict = &value + case float64: + f := int(v) + s.Predict = &f } case "settings": @@ -107,8 +136,19 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { } case "window": - if err := dec.Decode(&s.Window); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Window = &value + case float64: + f := int(v) + s.Window = &f } } diff --git a/typedapi/types/simplequerystringflags.go b/typedapi/types/simplequerystringflags.go old mode 100755 new mode 100644 index 76951b6887..aa069d385a --- a/typedapi/types/simplequerystringflags.go +++ b/typedapi/types/simplequerystringflags.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
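Reviewer note: a hypothetical construction of the SimpleMovingAverageAggregation updated above, using a struct literal; "the_sum" is an illustrative buckets_path value and this assumes BucketsPath, like the other union aliases in this diff, is an interface type that accepts a plain string:

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        window := 30
        minimize := true
        ma := types.SimpleMovingAverageAggregation{
            BucketsPath: "the_sum", // assumption: union alias accepts a string
            Window:      &window,
            Minimize:    &minimize,
        }
        out, _ := json.Marshal(ma)
        fmt.Println(string(out))
    }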
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // simplequerystringflag.SimpleQueryStringFlag // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L271-L276 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L271-L276 type SimpleQueryStringFlags interface{} diff --git a/typedapi/types/simplequerystringquery.go b/typedapi/types/simplequerystringquery.go old mode 100755 new mode 100644 index 4f1ca8d607..dbf170d206 --- a/typedapi/types/simplequerystringquery.go +++ b/typedapi/types/simplequerystringquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -28,12 +28,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // SimpleQueryStringQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/fulltext.ts#L294-L312 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/fulltext.ts#L294-L312 type SimpleQueryStringQuery struct { AnalyzeWildcard *bool `json:"analyze_wildcard,omitempty"` Analyzer *string `json:"analyzer,omitempty"` @@ -53,6 +55,7 @@ type SimpleQueryStringQuery struct { } func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,23 +70,55 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { switch t { case "analyze_wildcard": - if err := dec.Decode(&s.AnalyzeWildcard); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AnalyzeWildcard = &value + case bool: + s.AnalyzeWildcard = &v } case "analyzer": - if err := dec.Decode(&s.Analyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Analyzer = &o case "auto_generate_synonyms_phrase_query": - if err := dec.Decode(&s.AutoGenerateSynonymsPhraseQuery); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AutoGenerateSynonymsPhraseQuery = &value + case bool: + s.AutoGenerateSynonymsPhraseQuery = &v } case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f } case "default_operator": @@ -97,6 +132,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { } case "flags": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -116,23 +152,63 @@ func (s 
*SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { } case "fuzzy_max_expansions": - if err := dec.Decode(&s.FuzzyMaxExpansions); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FuzzyMaxExpansions = &value + case float64: + f := int(v) + s.FuzzyMaxExpansions = &f } case "fuzzy_prefix_length": - if err := dec.Decode(&s.FuzzyPrefixLength); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FuzzyPrefixLength = &value + case float64: + f := int(v) + s.FuzzyPrefixLength = &f } case "fuzzy_transpositions": - if err := dec.Decode(&s.FuzzyTranspositions); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FuzzyTranspositions = &value + case bool: + s.FuzzyTranspositions = &v } case "lenient": - if err := dec.Decode(&s.Lenient); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lenient = &value + case bool: + s.Lenient = &v } case "minimum_should_match": @@ -141,19 +217,28 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { } case "query": - if err := dec.Decode(&s.Query); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Query = o case "_name": - if err := dec.Decode(&s.QueryName_); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.QueryName_ = &o case "quote_field_suffix": - if err := dec.Decode(&s.QuoteFieldSuffix); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.QuoteFieldSuffix = &o } } diff --git a/typedapi/types/simplevalueaggregate.go b/typedapi/types/simplevalueaggregate.go old mode 100755 new mode 100644 index 932cf61faa..5a7a5170be --- a/typedapi/types/simplevalueaggregate.go +++ b/typedapi/types/simplevalueaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // SimpleValueAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L223-L224 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L224-L225 type SimpleValueAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. 
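Reviewer note: taken together, the SimpleQueryStringQuery decoder above now tolerates string-encoded booleans and integers. A hypothetical round trip (the JSON body is illustrative, not a fixture from the repository):

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        var q types.SimpleQueryStringQuery
        body := []byte(`{"analyze_wildcard":"true","fuzzy_max_expansions":"50","lenient":false}`)
        if err := json.Unmarshal(body, &q); err != nil {
            panic(err)
        }
        fmt.Println(*q.AnalyzeWildcard, *q.FuzzyMaxExpansions, *q.Lenient) // true 50 false
    }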
@@ -36,6 +40,44 @@ type SimpleValueAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *SimpleValueAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewSimpleValueAggregate returns a SimpleValueAggregate. func NewSimpleValueAggregate() *SimpleValueAggregate { r := &SimpleValueAggregate{} diff --git a/typedapi/types/simulatedactions.go b/typedapi/types/simulatedactions.go old mode 100755 new mode 100644 index c7203a335e..0571539102 --- a/typedapi/types/simulatedactions.go +++ b/typedapi/types/simulatedactions.go @@ -16,19 +16,73 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SimulatedActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L93-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L93-L97 type SimulatedActions struct { Actions []string `json:"actions"` All *SimulatedActions `json:"all,omitempty"` UseAll bool `json:"use_all"` } +func (s *SimulatedActions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "all": + if err := dec.Decode(&s.All); err != nil { + return err + } + + case "use_all": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UseAll = value + case bool: + s.UseAll = v + } + + } + } + return nil +} + // NewSimulatedActions returns a SimulatedActions. func NewSimulatedActions() *SimulatedActions { r := &SimulatedActions{} diff --git a/typedapi/types/simulateingest.go b/typedapi/types/simulateingest.go old mode 100755 new mode 100644 index a2644492ed..f221fad61d --- a/typedapi/types/simulateingest.go +++ b/typedapi/types/simulateingest.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SimulateIngest type. 
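Reviewer note: every UnmarshalJSON added in this diff follows the same token-loop shape: read one key token at a time with Decoder.Token, then decode the value that follows it into the matching field. A reduced, standalone version of that loop for a two-field struct (the struct itself is illustrative, not generated):

    package main

    import (
        "bytes"
        "encoding/json"
        "errors"
        "fmt"
        "io"
    )

    type simulateDoc struct {
        Pipeline  string
        Timestamp string
    }

    func (s *simulateDoc) UnmarshalJSON(data []byte) error {
        dec := json.NewDecoder(bytes.NewReader(data))
        for {
            t, err := dec.Token()
            if err != nil {
                if errors.Is(err, io.EOF) {
                    break
                }
                return err
            }
            switch t {
            case "pipeline":
                if err := dec.Decode(&s.Pipeline); err != nil {
                    return err
                }
            case "timestamp":
                if err := dec.Decode(&s.Timestamp); err != nil {
                    return err
                }
            }
        }
        return nil
    }

    func main() {
        var d simulateDoc
        _ = json.Unmarshal([]byte(`{"pipeline":"p1","timestamp":"2023-01-01"}`), &d)
        fmt.Printf("%+v\n", d)
    }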
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/simulate/types.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/simulate/types.ts#L28-L31 type SimulateIngest struct { Pipeline *string `json:"pipeline,omitempty"` Timestamp DateTime `json:"timestamp"` } +func (s *SimulateIngest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "pipeline": + if err := dec.Decode(&s.Pipeline); err != nil { + return err + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewSimulateIngest returns a SimulateIngest. func NewSimulateIngest() *SimulateIngest { r := &SimulateIngest{} diff --git a/typedapi/types/sizefield.go b/typedapi/types/sizefield.go old mode 100755 new mode 100644 index 2a9603992d..8f2d727f31 --- a/typedapi/types/sizefield.go +++ b/typedapi/types/sizefield.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SizeField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L54-L56 type SizeField struct { Enabled bool `json:"enabled"` } +func (s *SizeField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewSizeField returns a SizeField. func NewSizeField() *SizeField { r := &SizeField{} diff --git a/typedapi/types/slackaction.go b/typedapi/types/slackaction.go old mode 100755 new mode 100644 index d689400be3..4021bed3c5 --- a/typedapi/types/slackaction.go +++ b/typedapi/types/slackaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SlackAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L91-L94 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L91-L94 type SlackAction struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` diff --git a/typedapi/types/slackattachment.go b/typedapi/types/slackattachment.go old mode 100755 new mode 100644 index f30c52d18b..4a44d0e303 --- a/typedapi/types/slackattachment.go +++ b/typedapi/types/slackattachment.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SlackAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L101-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L101-L117 type SlackAttachment struct { AuthorIcon *string `json:"author_icon,omitempty"` AuthorLink *string `json:"author_link,omitempty"` @@ -41,6 +49,140 @@ type SlackAttachment struct { Ts *int64 `json:"ts,omitempty"` } +func (s *SlackAttachment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "author_icon": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AuthorIcon = &o + + case "author_link": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AuthorLink = &o + + case "author_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AuthorName = o + + case "color": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Color = &o + + case "fallback": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Fallback = &o + + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return err + } + + case "footer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Footer = &o + + case "footer_icon": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FooterIcon = &o + + case "image_url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ImageUrl = &o + + case "pretext": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pretext = &o + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = &o + + case "thumb_url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ThumbUrl = &o + + case "title": + var tmp json.RawMessage + if err := 
dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Title = o + + case "title_link": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TitleLink = &o + + case "ts": + if err := dec.Decode(&s.Ts); err != nil { + return err + } + + } + } + return nil +} + // NewSlackAttachment returns a SlackAttachment. func NewSlackAttachment() *SlackAttachment { r := &SlackAttachment{} diff --git a/typedapi/types/slackattachmentfield.go b/typedapi/types/slackattachmentfield.go old mode 100755 new mode 100644 index 4c6a5c4f1f..4e85685adc --- a/typedapi/types/slackattachmentfield.go +++ b/typedapi/types/slackattachmentfield.go @@ -16,19 +16,79 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SlackAttachmentField type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L119-L123 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L119-L123 type SlackAttachmentField struct { Int bool `json:"short"` Title string `json:"title"` Value string `json:"value"` } +func (s *SlackAttachmentField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "short": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Int = value + case bool: + s.Int = v + } + + case "title": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Title = o + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewSlackAttachmentField returns a SlackAttachmentField. func NewSlackAttachmentField() *SlackAttachmentField { r := &SlackAttachmentField{} diff --git a/typedapi/types/slackdynamicattachment.go b/typedapi/types/slackdynamicattachment.go old mode 100755 new mode 100644 index 5c0bcc5665..00f083c3b5 --- a/typedapi/types/slackdynamicattachment.go +++ b/typedapi/types/slackdynamicattachment.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SlackDynamicAttachment type. 
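Reviewer note: for the Slack watcher types above, NewSlackAttachment plus pointer-typed optional fields makes construction slightly verbose. A hypothetical attachment (all literal values are illustrative):

    package main

    import (
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        att := types.NewSlackAttachment()
        att.Title = "Cluster alert" // plain string field
        text := "Disk watermark exceeded on node-1"
        att.Text = &text // optional fields stay pointers
        color := "#ff0000"
        att.Color = &color
        fmt.Printf("%+v\n", att)
    }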
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L125-L128 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L125-L128 type SlackDynamicAttachment struct { AttachmentTemplate SlackAttachment `json:"attachment_template"` ListPath string `json:"list_path"` diff --git a/typedapi/types/slackmessage.go b/typedapi/types/slackmessage.go old mode 100755 new mode 100644 index 1748d8395e..9a6778c27e --- a/typedapi/types/slackmessage.go +++ b/typedapi/types/slackmessage.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SlackMessage type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L130-L137 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L130-L137 type SlackMessage struct { Attachments []SlackAttachment `json:"attachments"` DynamicAttachments *SlackDynamicAttachment `json:"dynamic_attachments,omitempty"` diff --git a/typedapi/types/slackresult.go b/typedapi/types/slackresult.go old mode 100755 new mode 100644 index 579c4c660f..f5594ee780 --- a/typedapi/types/slackresult.go +++ b/typedapi/types/slackresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SlackResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L96-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L96-L99 type SlackResult struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` diff --git a/typedapi/types/slicedscroll.go b/typedapi/types/slicedscroll.go old mode 100755 new mode 100644 index 29a95bbb63..f13e7ca496 --- a/typedapi/types/slicedscroll.go +++ b/typedapi/types/slicedscroll.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SlicedScroll type. 
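Reviewer note: one naming quirk visible above is that the SlackAttachmentField member for the JSON key "short" is generated as Int. A hypothetical field shows how that reads at the call site:

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/elastic/go-elasticsearch/v8/typedapi/types"
    )

    func main() {
        f := types.NewSlackAttachmentField()
        f.Title = "node"
        f.Value = "node-1"
        f.Int = true // marshals as the "short" attribute
        out, _ := json.Marshal(f)
        fmt.Println(string(out)) // -> {"short":true,"title":"node","value":"node-1"}
    }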
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/SlicedScroll.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/SlicedScroll.ts#L23-L27 type SlicedScroll struct { Field *string `json:"field,omitempty"` Id string `json:"id"` Max int `json:"max"` } +func (s *SlicedScroll) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Max = value + case float64: + f := int(v) + s.Max = f + } + + } + } + return nil +} + // NewSlicedScroll returns a SlicedScroll. func NewSlicedScroll() *SlicedScroll { r := &SlicedScroll{} diff --git a/typedapi/types/slices.go b/typedapi/types/slices.go old mode 100755 new mode 100644 index 5d81aae6ef..6737de49e7 --- a/typedapi/types/slices.go +++ b/typedapi/types/slices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int // slicescalculation.SlicesCalculation // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L326-L331 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L326-L331 type Slices interface{} diff --git a/typedapi/types/slm.go b/typedapi/types/slm.go old mode 100755 new mode 100644 index 13c3c657b3..7f29646244 --- a/typedapi/types/slm.go +++ b/typedapi/types/slm.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Slm type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L440-L443 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L440-L443 type Slm struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -30,6 +40,75 @@ type Slm struct { PolicyStats *Statistics `json:"policy_stats,omitempty"` } +func (s *Slm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "policy_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PolicyCount = &value + case float64: + f := int(v) + s.PolicyCount = &f + } + + case "policy_stats": + if err := dec.Decode(&s.PolicyStats); err != nil { + return err + } + + } + } + return nil +} + // NewSlm returns a Slm. func NewSlm() *Slm { r := &Slm{} diff --git a/typedapi/types/slmindicator.go b/typedapi/types/slmindicator.go new file mode 100644 index 0000000000..28c3268266 --- /dev/null +++ b/typedapi/types/slmindicator.go @@ -0,0 +1,43 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indicatorhealthstatus" +) + +// SlmIndicator type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L154-L158 +type SlmIndicator struct { + Details *SlmIndicatorDetails `json:"details,omitempty"` + Diagnosis []Diagnosis `json:"diagnosis,omitempty"` + Impacts []Impact `json:"impacts,omitempty"` + Status indicatorhealthstatus.IndicatorHealthStatus `json:"status"` + Symptom string `json:"symptom"` +} + +// NewSlmIndicator returns a SlmIndicator. 
+func NewSlmIndicator() *SlmIndicator { + r := &SlmIndicator{} + + return r +} diff --git a/typedapi/types/slmindicatordetails.go b/typedapi/types/slmindicatordetails.go new file mode 100644 index 0000000000..556f54d718 --- /dev/null +++ b/typedapi/types/slmindicatordetails.go @@ -0,0 +1,94 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/lifecycleoperationmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// SlmIndicatorDetails type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L159-L163 +type SlmIndicatorDetails struct { + Policies int64 `json:"policies"` + SlmStatus lifecycleoperationmode.LifecycleOperationMode `json:"slm_status"` + UnhealthyPolicies SlmIndicatorUnhealthyPolicies `json:"unhealthy_policies"` +} + +func (s *SlmIndicatorDetails) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "policies": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Policies = value + case float64: + f := int64(v) + s.Policies = f + } + + case "slm_status": + if err := dec.Decode(&s.SlmStatus); err != nil { + return err + } + + case "unhealthy_policies": + if err := dec.Decode(&s.UnhealthyPolicies); err != nil { + return err + } + + } + } + return nil +} + +// NewSlmIndicatorDetails returns a SlmIndicatorDetails. +func NewSlmIndicatorDetails() *SlmIndicatorDetails { + r := &SlmIndicatorDetails{} + + return r +} diff --git a/typedapi/types/slmindicatorunhealthypolicies.go b/typedapi/types/slmindicatorunhealthypolicies.go new file mode 100644 index 0000000000..0c5d7e032b --- /dev/null +++ b/typedapi/types/slmindicatorunhealthypolicies.go @@ -0,0 +1,91 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 + +package types + +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + +// SlmIndicatorUnhealthyPolicies type. +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/health_report/types.ts#L165-L168 +type SlmIndicatorUnhealthyPolicies struct { + Count int64 `json:"count"` + InvocationsSinceLastSuccess map[string]int64 `json:"invocations_since_last_success,omitempty"` +} + +func (s *SlmIndicatorUnhealthyPolicies) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "invocations_since_last_success": + if s.InvocationsSinceLastSuccess == nil { + s.InvocationsSinceLastSuccess = make(map[string]int64, 0) + } + if err := dec.Decode(&s.InvocationsSinceLastSuccess); err != nil { + return err + } + + } + } + return nil +} + +// NewSlmIndicatorUnhealthyPolicies returns a SlmIndicatorUnhealthyPolicies. +func NewSlmIndicatorUnhealthyPolicies() *SlmIndicatorUnhealthyPolicies { + r := &SlmIndicatorUnhealthyPolicies{ + InvocationsSinceLastSuccess: make(map[string]int64, 0), + } + + return r +} diff --git a/typedapi/types/slmpolicy.go b/typedapi/types/slmpolicy.go old mode 100755 new mode 100644 index 66201a85fc..da5f6843c8 --- a/typedapi/types/slmpolicy.go +++ b/typedapi/types/slmpolicy.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SLMPolicy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L76-L82 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L76-L82 type SLMPolicy struct { Config *Configuration `json:"config,omitempty"` Name string `json:"name"` @@ -31,6 +39,54 @@ type SLMPolicy struct { Schedule string `json:"schedule"` } +func (s *SLMPolicy) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "config": + if err := dec.Decode(&s.Config); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "repository": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Repository = o + + case "retention": + if err := dec.Decode(&s.Retention); err != nil { + return err + } + + case "schedule": + if err := dec.Decode(&s.Schedule); err != nil { + return err + } + + } + } + return nil +} + // NewSLMPolicy returns a SLMPolicy. func NewSLMPolicy() *SLMPolicy { r := &SLMPolicy{} diff --git a/typedapi/types/slowlogsettings.go b/typedapi/types/slowlogsettings.go old mode 100755 new mode 100644 index 7e0055810b..41f80946a1 --- a/typedapi/types/slowlogsettings.go +++ b/typedapi/types/slowlogsettings.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SlowlogSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L472-L477 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L472-L477 type SlowlogSettings struct { Level *string `json:"level,omitempty"` Reformat *bool `json:"reformat,omitempty"` @@ -30,6 +40,69 @@ type SlowlogSettings struct { Threshold *SlowlogTresholds `json:"threshold,omitempty"` } +func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "level": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Level = &o + + case "reformat": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Reformat = &value + case bool: + s.Reformat = &v + } + + case "source": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Source = &value + case float64: + f := int(v) + s.Source = &f + } + + case "threshold": + if err := dec.Decode(&s.Threshold); err != nil { + return err + } + + } + } + return nil +} + // NewSlowlogSettings returns a SlowlogSettings. func NewSlowlogSettings() *SlowlogSettings { r := &SlowlogSettings{} diff --git a/typedapi/types/slowlogtresholdlevels.go b/typedapi/types/slowlogtresholdlevels.go old mode 100755 new mode 100644 index e5971c0046..e6f6a449f4 --- a/typedapi/types/slowlogtresholdlevels.go +++ b/typedapi/types/slowlogtresholdlevels.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SlowlogTresholdLevels type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L490-L495 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L490-L495 type SlowlogTresholdLevels struct { Debug Duration `json:"debug,omitempty"` Info Duration `json:"info,omitempty"` @@ -30,6 +38,46 @@ type SlowlogTresholdLevels struct { Warn Duration `json:"warn,omitempty"` } +func (s *SlowlogTresholdLevels) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "debug": + if err := dec.Decode(&s.Debug); err != nil { + return err + } + + case "info": + if err := dec.Decode(&s.Info); err != nil { + return err + } + + case "trace": + if err := dec.Decode(&s.Trace); err != nil { + return err + } + + case "warn": + if err := dec.Decode(&s.Warn); err != nil { + return err + } + + } + } + return nil +} + // NewSlowlogTresholdLevels returns a SlowlogTresholdLevels. 
func NewSlowlogTresholdLevels() *SlowlogTresholdLevels { r := &SlowlogTresholdLevels{} diff --git a/typedapi/types/slowlogtresholds.go b/typedapi/types/slowlogtresholds.go old mode 100755 new mode 100644 index 3360778c68..47fd017a4e --- a/typedapi/types/slowlogtresholds.go +++ b/typedapi/types/slowlogtresholds.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SlowlogTresholds type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L479-L488 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L479-L488 type SlowlogTresholds struct { Fetch *SlowlogTresholdLevels `json:"fetch,omitempty"` // Index The indexing slow log, similar in functionality to the search slow log. The diff --git a/typedapi/types/smoothingmodelcontainer.go b/typedapi/types/smoothingmodelcontainer.go old mode 100755 new mode 100644 index a27f7ffa08..9801139ba6 --- a/typedapi/types/smoothingmodelcontainer.go +++ b/typedapi/types/smoothingmodelcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SmoothingModelContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L224-L231 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L228-L235 type SmoothingModelContainer struct { Laplace *LaplaceSmoothingModel `json:"laplace,omitempty"` LinearInterpolation *LinearInterpolationSmoothingModel `json:"linear_interpolation,omitempty"` diff --git a/typedapi/types/snapshotindexstats.go b/typedapi/types/snapshotindexstats.go old mode 100755 new mode 100644 index d5b8e917c7..9acaae15a2 --- a/typedapi/types/snapshotindexstats.go +++ b/typedapi/types/snapshotindexstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SnapshotIndexStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotIndexStats.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotIndexStats.ts#L25-L29 type SnapshotIndexStats struct { Shards map[string]SnapshotShardsStatus `json:"shards"` ShardsStats SnapshotShardsStats `json:"shards_stats"` diff --git a/typedapi/types/snapshotinfo.go b/typedapi/types/snapshotinfo.go old mode 100755 new mode 100644 index cf0c460c99..286ff2ebf6 --- a/typedapi/types/snapshotinfo.go +++ b/typedapi/types/snapshotinfo.go @@ -16,39 +16,188 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // SnapshotInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotInfo.ts#L41-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotInfo.ts#L41-L65 type SnapshotInfo struct { - DataStreams []string `json:"data_streams"` - Duration Duration `json:"duration,omitempty"` - DurationInMillis *int64 `json:"duration_in_millis,omitempty"` - EndTime DateTime `json:"end_time,omitempty"` - EndTimeInMillis *int64 `json:"end_time_in_millis,omitempty"` - Failures []SnapshotShardFailure `json:"failures,omitempty"` - FeatureStates []InfoFeatureState `json:"feature_states,omitempty"` - IncludeGlobalState *bool `json:"include_global_state,omitempty"` - IndexDetails map[string]IndexDetails `json:"index_details,omitempty"` - Indices []string `json:"indices,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - Reason *string `json:"reason,omitempty"` - Repository *string `json:"repository,omitempty"` - Shards *ShardStatistics `json:"shards,omitempty"` - Snapshot string `json:"snapshot"` - StartTime DateTime `json:"start_time,omitempty"` - StartTimeInMillis *int64 `json:"start_time_in_millis,omitempty"` - State *string `json:"state,omitempty"` - Uuid string `json:"uuid"` - Version *string `json:"version,omitempty"` - VersionId *int64 `json:"version_id,omitempty"` + DataStreams []string `json:"data_streams"` + Duration Duration `json:"duration,omitempty"` + DurationInMillis *int64 `json:"duration_in_millis,omitempty"` + EndTime DateTime `json:"end_time,omitempty"` + EndTimeInMillis *int64 `json:"end_time_in_millis,omitempty"` + Failures []SnapshotShardFailure `json:"failures,omitempty"` + FeatureStates []InfoFeatureState `json:"feature_states,omitempty"` + IncludeGlobalState *bool `json:"include_global_state,omitempty"` + IndexDetails map[string]IndexDetails `json:"index_details,omitempty"` + Indices []string `json:"indices,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + Reason *string `json:"reason,omitempty"` + Repository *string `json:"repository,omitempty"` + Shards *ShardStatistics `json:"shards,omitempty"` + Snapshot string `json:"snapshot"` + StartTime DateTime `json:"start_time,omitempty"` + StartTimeInMillis *int64 `json:"start_time_in_millis,omitempty"` + State *string `json:"state,omitempty"` + Uuid 
string `json:"uuid"` + Version *string `json:"version,omitempty"` + VersionId *int64 `json:"version_id,omitempty"` +} + +func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "data_streams": + if err := dec.Decode(&s.DataStreams); err != nil { + return err + } + + case "duration": + if err := dec.Decode(&s.Duration); err != nil { + return err + } + + case "duration_in_millis": + if err := dec.Decode(&s.DurationInMillis); err != nil { + return err + } + + case "end_time": + if err := dec.Decode(&s.EndTime); err != nil { + return err + } + + case "end_time_in_millis": + if err := dec.Decode(&s.EndTimeInMillis); err != nil { + return err + } + + case "failures": + if err := dec.Decode(&s.Failures); err != nil { + return err + } + + case "feature_states": + if err := dec.Decode(&s.FeatureStates); err != nil { + return err + } + + case "include_global_state": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeGlobalState = &value + case bool: + s.IncludeGlobalState = &v + } + + case "index_details": + if s.IndexDetails == nil { + s.IndexDetails = make(map[string]IndexDetails, 0) + } + if err := dec.Decode(&s.IndexDetails); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = &o + + case "repository": + if err := dec.Decode(&s.Repository); err != nil { + return err + } + + case "shards": + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "snapshot": + if err := dec.Decode(&s.Snapshot); err != nil { + return err + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "state": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = &o + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "version_id": + if err := dec.Decode(&s.VersionId); err != nil { + return err + } + + } + } + return nil } // NewSnapshotInfo returns a SnapshotInfo. diff --git a/typedapi/types/snapshotlifecycle.go b/typedapi/types/snapshotlifecycle.go old mode 100755 new mode 100644 index 48d2c197a9..652df8a431 --- a/typedapi/types/snapshotlifecycle.go +++ b/typedapi/types/snapshotlifecycle.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SnapshotLifecycle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L38-L49 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L38-L49 type SnapshotLifecycle struct { InProgress *InProgress `json:"in_progress,omitempty"` LastFailure *Invocation `json:"last_failure,omitempty"` @@ -36,6 +44,76 @@ type SnapshotLifecycle struct { Version int64 `json:"version"` } +func (s *SnapshotLifecycle) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "in_progress": + if err := dec.Decode(&s.InProgress); err != nil { + return err + } + + case "last_failure": + if err := dec.Decode(&s.LastFailure); err != nil { + return err + } + + case "last_success": + if err := dec.Decode(&s.LastSuccess); err != nil { + return err + } + + case "modified_date": + if err := dec.Decode(&s.ModifiedDate); err != nil { + return err + } + + case "modified_date_millis": + if err := dec.Decode(&s.ModifiedDateMillis); err != nil { + return err + } + + case "next_execution": + if err := dec.Decode(&s.NextExecution); err != nil { + return err + } + + case "next_execution_millis": + if err := dec.Decode(&s.NextExecutionMillis); err != nil { + return err + } + + case "policy": + if err := dec.Decode(&s.Policy); err != nil { + return err + } + + case "stats": + if err := dec.Decode(&s.Stats); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSnapshotLifecycle returns a SnapshotLifecycle. func NewSnapshotLifecycle() *SnapshotLifecycle { r := &SnapshotLifecycle{} diff --git a/typedapi/types/snapshotresponseitem.go b/typedapi/types/snapshotresponseitem.go old mode 100755 new mode 100644 index 1cb69bcde3..3c39730396 --- a/typedapi/types/snapshotresponseitem.go +++ b/typedapi/types/snapshotresponseitem.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SnapshotResponseItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/get/SnapshotGetResponse.ts#L42-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/get/SnapshotGetResponse.ts#L42-L46 type SnapshotResponseItem struct { Error *ErrorCause `json:"error,omitempty"` Repository string `json:"repository"` Snapshots []SnapshotInfo `json:"snapshots,omitempty"` } +func (s *SnapshotResponseItem) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "error": + if err := dec.Decode(&s.Error); err != nil { + return err + } + + case "repository": + if err := dec.Decode(&s.Repository); err != nil { + return err + } + + case "snapshots": + if err := dec.Decode(&s.Snapshots); err != nil { + return err + } + + } + } + return nil +} + // NewSnapshotResponseItem returns a SnapshotResponseItem. func NewSnapshotResponseItem() *SnapshotResponseItem { r := &SnapshotResponseItem{} diff --git a/typedapi/types/snapshotrestore.go b/typedapi/types/snapshotrestore.go old mode 100755 new mode 100644 index 5db93012ae..efa28365d7 --- a/typedapi/types/snapshotrestore.go +++ b/typedapi/types/snapshotrestore.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SnapshotRestore type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/restore/SnapshotRestoreResponse.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/restore/SnapshotRestoreResponse.ts#L27-L31 type SnapshotRestore struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` Snapshot string `json:"snapshot"` } +func (s *SnapshotRestore) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "shards": + if err := dec.Decode(&s.Shards); err != nil { + return err + } + + case "snapshot": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Snapshot = o + + } + } + return nil +} + // NewSnapshotRestore returns a SnapshotRestore. func NewSnapshotRestore() *SnapshotRestore { r := &SnapshotRestore{} diff --git a/typedapi/types/snapshotshardfailure.go b/typedapi/types/snapshotshardfailure.go old mode 100755 new mode 100644 index 943486a9e5..eadd564a20 --- a/typedapi/types/snapshotshardfailure.go +++ b/typedapi/types/snapshotshardfailure.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SnapshotShardFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotShardFailure.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotShardFailure.ts#L22-L28 type SnapshotShardFailure struct { Index string `json:"index"` NodeId string `json:"node_id"` @@ -31,6 +39,57 @@ type SnapshotShardFailure struct { Status string `json:"status"` } +func (s *SnapshotShardFailure) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = o + + case "shard_id": + if err := dec.Decode(&s.ShardId); err != nil { + return err + } + + case "status": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Status = o + + } + } + return nil +} + // NewSnapshotShardFailure returns a SnapshotShardFailure. func NewSnapshotShardFailure() *SnapshotShardFailure { r := &SnapshotShardFailure{} diff --git a/typedapi/types/snapshotshardsstats.go b/typedapi/types/snapshotshardsstats.go old mode 100755 new mode 100644 index 470a2289b5..78a3f9f403 --- a/typedapi/types/snapshotshardsstats.go +++ b/typedapi/types/snapshotshardsstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SnapshotShardsStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotShardsStats.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotShardsStats.ts#L22-L29 type SnapshotShardsStats struct { Done int64 `json:"done"` Failed int64 `json:"failed"` @@ -32,6 +42,116 @@ type SnapshotShardsStats struct { Total int64 `json:"total"` } +func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "done": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Done = value + case float64: + f := int64(v) + s.Done = f + } + + case "failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Failed = value + case float64: + f := int64(v) + s.Failed = f + } + + case "finalizing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Finalizing = value + case float64: + f := int64(v) + s.Finalizing = f + } + + case "initializing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Initializing = value + case float64: + f := int64(v) + s.Initializing = f + } + + case "started": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Started = value + case float64: + f := int64(v) + s.Started = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewSnapshotShardsStats returns a SnapshotShardsStats. func NewSnapshotShardsStats() *SnapshotShardsStats { r := &SnapshotShardsStats{} diff --git a/typedapi/types/snapshotshardsstatus.go b/typedapi/types/snapshotshardsstatus.go old mode 100755 new mode 100644 index 4d20e8bcdc..4aff82ebaf --- a/typedapi/types/snapshotshardsstatus.go +++ b/typedapi/types/snapshotshardsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // SnapshotShardsStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotShardsStatus.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotShardsStatus.ts#L24-L27 type SnapshotShardsStatus struct { Stage shardsstatsstage.ShardsStatsStage `json:"stage"` Stats ShardsStatsSummary `json:"stats"` diff --git a/typedapi/types/snapshotsrecord.go b/typedapi/types/snapshotsrecord.go old mode 100755 new mode 100644 index f0c7eb2827..5960ff23ca --- a/typedapi/types/snapshotsrecord.go +++ b/typedapi/types/snapshotsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,7 +30,7 @@ import ( // SnapshotsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/snapshots/types.ts#L24-L90 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/snapshots/types.ts#L24-L90 type SnapshotsRecord struct { // Duration duration Duration Duration `json:"duration,omitempty"` @@ -61,6 +61,7 @@ type SnapshotsRecord struct { } func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -90,29 +91,44 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { } case "failed_shards", "fs": - if err := dec.Decode(&s.FailedShards); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.FailedShards = &o case "id", "snapshot": - if err := dec.Decode(&s.Id); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Id = &o case "indices", "i": - if err := dec.Decode(&s.Indices); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Indices = &o case "reason", "r": - if err := dec.Decode(&s.Reason); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Reason = &o case "repository", "re", "repo": - if err := dec.Decode(&s.Repository); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Repository = &o case "start_epoch", "ste", "startEpoch": if err := dec.Decode(&s.StartEpoch); err != nil { @@ -120,6 +136,7 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { } case "start_time", "sti", "startTime": + rawMsg := json.RawMessage{} dec.Decode(&rawMsg) source := bytes.NewReader(rawMsg) @@ -139,19 +156,28 @@ func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error { } case "status", "s": - if err := dec.Decode(&s.Status); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Status = &o case "successful_shards", "ss": - if err := dec.Decode(&s.SuccessfulShards); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SuccessfulShards = &o case "total_shards", "ts": - if err := dec.Decode(&s.TotalShards); err != nil { + 
var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.TotalShards = &o } } diff --git a/typedapi/types/snapshotstats.go b/typedapi/types/snapshotstats.go old mode 100755 new mode 100644 index c134398461..44850c0f0d --- a/typedapi/types/snapshotstats.go +++ b/typedapi/types/snapshotstats.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SnapshotStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotStats.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotStats.ts#L23-L29 type SnapshotStats struct { Incremental FileCountSnapshotStats `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` @@ -31,6 +39,51 @@ type SnapshotStats struct { Total FileCountSnapshotStats `json:"total"` } +func (s *SnapshotStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "incremental": + if err := dec.Decode(&s.Incremental); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "time": + if err := dec.Decode(&s.Time); err != nil { + return err + } + + case "time_in_millis": + if err := dec.Decode(&s.TimeInMillis); err != nil { + return err + } + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + } + } + return nil +} + // NewSnapshotStats returns a SnapshotStats. func NewSnapshotStats() *SnapshotStats { r := &SnapshotStats{} diff --git a/typedapi/types/snowballanalyzer.go b/typedapi/types/snowballanalyzer.go old mode 100755 new mode 100644 index ec9beacdc6..7f048fb6b6 --- a/typedapi/types/snowballanalyzer.go +++ b/typedapi/types/snowballanalyzer.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/snowballlanguage" + + "bytes" + "errors" + "io" + + "encoding/json" ) // SnowballAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L88-L93 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L88-L93 type SnowballAnalyzer struct { Language snowballlanguage.SnowballLanguage `json:"language"` Stopwords []string `json:"stopwords,omitempty"` @@ -34,6 +40,57 @@ type SnowballAnalyzer struct { Version *string `json:"version,omitempty"` } +func (s *SnowballAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "language": + if err := dec.Decode(&s.Language); err != nil { + return err + } + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSnowballAnalyzer returns a SnowballAnalyzer. func NewSnowballAnalyzer() *SnowballAnalyzer { r := &SnowballAnalyzer{} diff --git a/typedapi/types/snowballtokenfilter.go b/typedapi/types/snowballtokenfilter.go old mode 100755 new mode 100644 index 126bb5fa00..ed78e0c089 --- a/typedapi/types/snowballtokenfilter.go +++ b/typedapi/types/snowballtokenfilter.go @@ -16,23 +16,64 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/snowballlanguage" + + "bytes" + "errors" + "io" + + "encoding/json" ) // SnowballTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L308-L311 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L309-L312 type SnowballTokenFilter struct { Language snowballlanguage.SnowballLanguage `json:"language"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *SnowballTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "language": + if err := dec.Decode(&s.Language); err != nil { + return err + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSnowballTokenFilter returns a SnowballTokenFilter. 
func NewSnowballTokenFilter() *SnowballTokenFilter { r := &SnowballTokenFilter{} diff --git a/typedapi/types/softdeletes.go b/typedapi/types/softdeletes.go old mode 100755 new mode 100644 index 348897dda9..f695d7b756 --- a/typedapi/types/softdeletes.go +++ b/typedapi/types/softdeletes.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SoftDeletes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L50-L63 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L50-L63 type SoftDeletes struct { // Enabled Indicates whether soft deletes are enabled on the index. Enabled *bool `json:"enabled,omitempty"` @@ -36,6 +46,45 @@ type SoftDeletes struct { RetentionLease *RetentionLease `json:"retention_lease,omitempty"` } +func (s *SoftDeletes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v + } + + case "retention_lease": + if err := dec.Decode(&s.RetentionLease); err != nil { + return err + } + + } + } + return nil +} + // NewSoftDeletes returns a SoftDeletes. func NewSoftDeletes() *SoftDeletes { r := &SoftDeletes{} diff --git a/typedapi/types/sort.go b/typedapi/types/sort.go old mode 100755 new mode 100644 index 1ea9aec7d9..5aff56ad73 --- a/typedapi/types/sort.go +++ b/typedapi/types/sort.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Sort type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L99-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L99-L99 type Sort []SortCombinations diff --git a/typedapi/types/sortcombinations.go b/typedapi/types/sortcombinations.go old mode 100755 new mode 100644 index 83aae092f9..ea5cedb73b --- a/typedapi/types/sortcombinations.go +++ b/typedapi/types/sortcombinations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // SortOptions // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L93-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L93-L97 type SortCombinations interface{} diff --git a/typedapi/types/sortoptions.go b/typedapi/types/sortoptions.go old mode 100755 new mode 100644 index 01fc4e025a..54550a6cb6 --- a/typedapi/types/sortoptions.go +++ b/typedapi/types/sortoptions.go @@ -16,24 +16,79 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "encoding/json" ) // SortOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L82-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L82-L91 type SortOptions struct { Doc_ *ScoreSort `json:"_doc,omitempty"` GeoDistance_ *GeoDistanceSort `json:"_geo_distance,omitempty"` Score_ *ScoreSort `json:"_score,omitempty"` Script_ *ScriptSort `json:"_script,omitempty"` - SortOptions map[string]FieldSort `json:"-"` + SortOptions map[string]FieldSort `json:"SortOptions,omitempty"` +} + +func (s *SortOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_doc": + if err := dec.Decode(&s.Doc_); err != nil { + return err + } + + case "_geo_distance": + if err := dec.Decode(&s.GeoDistance_); err != nil { + return err + } + + case "_score": + if err := dec.Decode(&s.Score_); err != nil { + return err + } + + case "_script": + if err := dec.Decode(&s.Script_); err != nil { + return err + } + + case "SortOptions": + if s.SortOptions == nil { + s.SortOptions = make(map[string]FieldSort, 0) + } + if err := dec.Decode(&s.SortOptions); err != nil { + return err + } + + default: + + } + } + return nil } // MarhsalJSON overrides marshalling for types with additional properties @@ -55,6 +110,7 @@ func (s SortOptions) MarshalJSON() ([]byte, error) { for key, value := range s.SortOptions { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "SortOptions") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/sortprocessor.go b/typedapi/types/sortprocessor.go old mode 100755 new mode 100644 index 82e6251a22..f1088b21d9 --- a/typedapi/types/sortprocessor.go +++ b/typedapi/types/sortprocessor.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortorder" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SortProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L348-L352 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L348-L352 type SortProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -38,6 +46,84 @@ type SortProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *SortProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "order": + if err := dec.Decode(&s.Order); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewSortProcessor returns a SortProcessor. func NewSortProcessor() *SortProcessor { r := &SortProcessor{} diff --git a/typedapi/types/sortresults.go b/typedapi/types/sortresults.go old mode 100755 new mode 100644 index 6ca03b40e8..dd5950855b --- a/typedapi/types/sortresults.go +++ b/typedapi/types/sortresults.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SortResults type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/sort.ts#L101-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/sort.ts#L101-L101 type SortResults []FieldValue diff --git a/typedapi/types/sourceconfig.go b/typedapi/types/sourceconfig.go old mode 100755 new mode 100644 index 86b712ed82..683aa74709 --- a/typedapi/types/sourceconfig.go +++ b/typedapi/types/sourceconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // bool // SourceFilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/SourceFilter.ts#L33-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/SourceFilter.ts#L33-L37 type SourceConfig interface{} diff --git a/typedapi/types/sourceconfigparam.go b/typedapi/types/sourceconfigparam.go old mode 100755 new mode 100644 index 79d2ecd620..5690f63477 --- a/typedapi/types/sourceconfigparam.go +++ b/typedapi/types/sourceconfigparam.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // bool // []string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/SourceFilter.ts#L39-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/SourceFilter.ts#L39-L45 type SourceConfigParam interface{} diff --git a/typedapi/types/sourcefield.go b/typedapi/types/sourcefield.go old mode 100755 new mode 100644 index e661a165dd..6d27c38a20 --- a/typedapi/types/sourcefield.go +++ b/typedapi/types/sourcefield.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sourcefieldmode" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SourceField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/meta-fields.ts#L58-L65 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/meta-fields.ts#L58-L65 type SourceField struct { Compress *bool `json:"compress,omitempty"` CompressThreshold *string `json:"compress_threshold,omitempty"` @@ -36,6 +44,77 @@ type SourceField struct { Mode *sourcefieldmode.SourceFieldMode `json:"mode,omitempty"` } +func (s *SourceField) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compress": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Compress = &value + case bool: + s.Compress = &v + } + + case "compress_threshold": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CompressThreshold = &o + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v + } + + case "excludes": + if err := dec.Decode(&s.Excludes); err != nil { + return err + } + + case "includes": + if err := dec.Decode(&s.Includes); err != nil { + return err + } + + case "mode": + if err := dec.Decode(&s.Mode); err != nil { + return err + } + + } + } + return nil +} + // NewSourceField returns a SourceField. func NewSourceField() *SourceField { r := &SourceField{} diff --git a/typedapi/types/sourcefilter.go b/typedapi/types/sourcefilter.go old mode 100755 new mode 100644 index 8f690a8df9..f557124118 --- a/typedapi/types/sourcefilter.go +++ b/typedapi/types/sourcefilter.go @@ -16,18 +16,83 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SourceFilter type. 
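The new SourceField decoder in the hunk above coerces booleans that arrive as JSON strings via strconv.ParseBool, alongside the regular bool form. A minimal, hand-written sketch of that behaviour (the payload is invented for illustration and only exercises fields shown in the hunk):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// "enabled" is a string here; the generated UnmarshalJSON parses it into a bool.
	raw := []byte(`{"enabled":"false","compress":true,"excludes":["*.secret"]}`)

	var sf types.SourceField
	if err := json.Unmarshal(raw, &sf); err != nil {
		panic(err)
	}
	fmt.Println(*sf.Enabled, *sf.Compress, sf.Excludes) // false true [*.secret]
}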
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/SourceFilter.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/SourceFilter.ts#L23-L31 type SourceFilter struct { Excludes []string `json:"excludes,omitempty"` Includes []string `json:"includes,omitempty"` } +func (s *SourceFilter) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Includes) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "excludes", "exclude": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Excludes = append(s.Excludes, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Excludes); err != nil { + return err + } + } + + case "includes", "include": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Includes = append(s.Includes, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Includes); err != nil { + return err + } + } + + } + } + return nil +} + // NewSourceFilter returns a SourceFilter. func NewSourceFilter() *SourceFilter { r := &SourceFilter{} diff --git a/typedapi/types/spancontainingquery.go b/typedapi/types/spancontainingquery.go old mode 100755 new mode 100644 index fb2ed73605..554803c4a1 --- a/typedapi/types/spancontainingquery.go +++ b/typedapi/types/spancontainingquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanContainingQuery type. 
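The SourceFilter decoder above tolerates two short forms visible in the hunk: a non-object body is routed straight into Includes, and the singular keys include/exclude (with either a scalar or an array value) are folded into the corresponding slices. A small sketch, with made-up filter patterns:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Array short form: decoded directly into Includes.
	var short types.SourceFilter
	if err := json.Unmarshal([]byte(`["obj.*","user.id"]`), &short); err != nil {
		panic(err)
	}

	// Singular keys and scalar values are accepted as well.
	var long types.SourceFilter
	if err := json.Unmarshal([]byte(`{"include":"obj.*","excludes":"*.secret"}`), &long); err != nil {
		panic(err)
	}

	fmt.Println(short.Includes, long.Includes, long.Excludes)
	// [obj.* user.id] [obj.*] [*.secret]
}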
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L25-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L25-L28 type SpanContainingQuery struct { Big *SpanQuery `json:"big,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -30,6 +40,60 @@ type SpanContainingQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanContainingQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "big": + if err := dec.Decode(&s.Big); err != nil { + return err + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "little": + if err := dec.Decode(&s.Little); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanContainingQuery returns a SpanContainingQuery. func NewSpanContainingQuery() *SpanContainingQuery { r := &SpanContainingQuery{} diff --git a/typedapi/types/spanfieldmaskingquery.go b/typedapi/types/spanfieldmaskingquery.go old mode 100755 new mode 100644 index e6429d8369..72464f729f --- a/typedapi/types/spanfieldmaskingquery.go +++ b/typedapi/types/spanfieldmaskingquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanFieldMaskingQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L30-L33 type SpanFieldMaskingQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` @@ -30,6 +40,60 @@ type SpanFieldMaskingQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanFieldMaskingQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanFieldMaskingQuery returns a SpanFieldMaskingQuery. func NewSpanFieldMaskingQuery() *SpanFieldMaskingQuery { r := &SpanFieldMaskingQuery{} diff --git a/typedapi/types/spanfirstquery.go b/typedapi/types/spanfirstquery.go old mode 100755 new mode 100644 index 89e761c476..9c7a8b343d --- a/typedapi/types/spanfirstquery.go +++ b/typedapi/types/spanfirstquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanFirstQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L35-L38 type SpanFirstQuery struct { Boost *float32 `json:"boost,omitempty"` End int `json:"end"` @@ -30,6 +40,71 @@ type SpanFirstQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanFirstQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "end": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.End = value + case float64: + f := int(v) + s.End = f + } + + case "match": + if err := dec.Decode(&s.Match); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanFirstQuery returns a SpanFirstQuery. func NewSpanFirstQuery() *SpanFirstQuery { r := &SpanFirstQuery{} diff --git a/typedapi/types/spangapquery.go b/typedapi/types/spangapquery.go old mode 100755 new mode 100644 index e7ea0e91b0..aed680a6ef --- a/typedapi/types/spangapquery.go +++ b/typedapi/types/spangapquery.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SpanGapQuery type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L40-L42 type SpanGapQuery map[string]int diff --git a/typedapi/types/spanmultitermquery.go b/typedapi/types/spanmultitermquery.go old mode 100755 new mode 100644 index 1733377e2e..01a28ec6db --- a/typedapi/types/spanmultitermquery.go +++ b/typedapi/types/spanmultitermquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanMultiTermQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L44-L47 type SpanMultiTermQuery struct { Boost *float32 `json:"boost,omitempty"` // Match Should be a multi term query (one of wildcard, fuzzy, prefix, range or regexp @@ -31,6 +41,55 @@ type SpanMultiTermQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanMultiTermQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "match": + if err := dec.Decode(&s.Match); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanMultiTermQuery returns a SpanMultiTermQuery. func NewSpanMultiTermQuery() *SpanMultiTermQuery { r := &SpanMultiTermQuery{} diff --git a/typedapi/types/spannearquery.go b/typedapi/types/spannearquery.go old mode 100755 new mode 100644 index ff3a8a636b..9eb0737adf --- a/typedapi/types/spannearquery.go +++ b/typedapi/types/spannearquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanNearQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L49-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L49-L53 type SpanNearQuery struct { Boost *float32 `json:"boost,omitempty"` Clauses []SpanQuery `json:"clauses"` @@ -31,6 +41,85 @@ type SpanNearQuery struct { Slop *int `json:"slop,omitempty"` } +func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "clauses": + if err := dec.Decode(&s.Clauses); err != nil { + return err + } + + case "in_order": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.InOrder = &value + case bool: + s.InOrder = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "slop": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Slop = &value + case float64: + f := int(v) + s.Slop = &f + } + + } + } + return nil +} + // NewSpanNearQuery returns a SpanNearQuery. func NewSpanNearQuery() *SpanNearQuery { r := &SpanNearQuery{} diff --git a/typedapi/types/spannotquery.go b/typedapi/types/spannotquery.go old mode 100755 new mode 100644 index 5b0ea2f957..676536aa4c --- a/typedapi/types/spannotquery.go +++ b/typedapi/types/spannotquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanNotQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L55-L63 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L55-L63 type SpanNotQuery struct { Boost *float32 `json:"boost,omitempty"` Dist *int `json:"dist,omitempty"` @@ -33,6 +43,108 @@ type SpanNotQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "dist": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Dist = &value + case float64: + f := int(v) + s.Dist = &f + } + + case "exclude": + if err := dec.Decode(&s.Exclude); err != nil { + return err + } + + case "include": + if err := dec.Decode(&s.Include); err != nil { + return err + } + + case "post": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Post = &value + case float64: + f := int(v) + s.Post = &f + } + + case "pre": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Pre = &value + case float64: + f := int(v) + s.Pre = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanNotQuery returns a SpanNotQuery. func NewSpanNotQuery() *SpanNotQuery { r := &SpanNotQuery{} diff --git a/typedapi/types/spanorquery.go b/typedapi/types/spanorquery.go old mode 100755 new mode 100644 index 80e057b26f..139a9e69a3 --- a/typedapi/types/spanorquery.go +++ b/typedapi/types/spanorquery.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanOrQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L65-L67 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L65-L67 type SpanOrQuery struct { Boost *float32 `json:"boost,omitempty"` Clauses []SpanQuery `json:"clauses"` QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanOrQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "clauses": + if err := dec.Decode(&s.Clauses); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanOrQuery returns a SpanOrQuery. func NewSpanOrQuery() *SpanOrQuery { r := &SpanOrQuery{} diff --git a/typedapi/types/spanquery.go b/typedapi/types/spanquery.go old mode 100755 new mode 100644 index fd930317fb..a85f90bc2c --- a/typedapi/types/spanquery.go +++ b/typedapi/types/spanquery.go @@ -16,18 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SpanQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L79-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L79-L91 type SpanQuery struct { FieldMaskingSpan *SpanFieldMaskingQuery `json:"field_masking_span,omitempty"` SpanContaining *SpanContainingQuery `json:"span_containing,omitempty"` SpanFirst *SpanFirstQuery `json:"span_first,omitempty"` - SpanGap map[string]int `json:"span_gap,omitempty"` + SpanGap SpanGapQuery `json:"span_gap,omitempty"` SpanMulti *SpanMultiTermQuery `json:"span_multi,omitempty"` SpanNear *SpanNearQuery `json:"span_near,omitempty"` SpanNot *SpanNotQuery `json:"span_not,omitempty"` @@ -36,6 +44,79 @@ type SpanQuery struct { SpanWithin *SpanWithinQuery `json:"span_within,omitempty"` } +func (s *SpanQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field_masking_span": + if err := dec.Decode(&s.FieldMaskingSpan); err != nil { + return err + } + + case "span_containing": + if err := dec.Decode(&s.SpanContaining); err != nil { + return err + } + + case "span_first": + if err := dec.Decode(&s.SpanFirst); err != nil { + return err + } + + case "span_gap": + if err := dec.Decode(&s.SpanGap); err != nil { + return err + } + + case "span_multi": + if err := dec.Decode(&s.SpanMulti); err != nil { + return err + } + + case "span_near": + if err := dec.Decode(&s.SpanNear); err != nil { + return err + } + + case "span_not": + if err := dec.Decode(&s.SpanNot); err != nil { + return err + } + + case "span_or": + if err := dec.Decode(&s.SpanOr); err != nil { + return err + } + + case "span_term": + if s.SpanTerm == nil { + s.SpanTerm = make(map[string]SpanTermQuery, 0) + } + if err := dec.Decode(&s.SpanTerm); err != nil { + return err + } + + case "span_within": + if err := dec.Decode(&s.SpanWithin); err != nil { + return err + } + + } + } + return nil +} + // NewSpanQuery returns a SpanQuery. func NewSpanQuery() *SpanQuery { r := &SpanQuery{ diff --git a/typedapi/types/spantermquery.go b/typedapi/types/spantermquery.go old mode 100755 new mode 100644 index 75a2473881..af603192a6 --- a/typedapi/types/spantermquery.go +++ b/typedapi/types/spantermquery.go @@ -16,19 +16,86 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanTermQuery type. 
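With SpanGap now typed as SpanGapQuery, and SpanTermQuery (in the following hunk) accepting its bare-string short form, a nested span query decodes as sketched below; the field names and values are invented for illustration:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{
	  "span_near": {
	    "clauses": [
	      {"span_term": {"user.id": "kimchy"}},
	      {"span_gap": {"user.id": 2}}
	    ],
	    "slop": "3",
	    "in_order": true
	  }
	}`)

	var q types.SpanQuery
	if err := json.Unmarshal(raw, &q); err != nil {
		panic(err)
	}
	fmt.Println(q.SpanNear.Clauses[0].SpanTerm["user.id"].Value) // kimchy
	fmt.Println(q.SpanNear.Clauses[1].SpanGap["user.id"])        // 2
	fmt.Println(*q.SpanNear.Slop, *q.SpanNear.InOrder)           // 3 true
}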
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L69-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L69-L72 type SpanTermQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Value string `json:"value"` } +func (s *SpanTermQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Value) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewSpanTermQuery returns a SpanTermQuery. func NewSpanTermQuery() *SpanTermQuery { r := &SpanTermQuery{} diff --git a/typedapi/types/spanwithinquery.go b/typedapi/types/spanwithinquery.go old mode 100755 new mode 100644 index b026616a9a..25e5c2c602 --- a/typedapi/types/spanwithinquery.go +++ b/typedapi/types/spanwithinquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SpanWithinQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/span.ts#L74-L77 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/span.ts#L74-L77 type SpanWithinQuery struct { Big *SpanQuery `json:"big,omitempty"` Boost *float32 `json:"boost,omitempty"` @@ -30,6 +40,60 @@ type SpanWithinQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *SpanWithinQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "big": + if err := dec.Decode(&s.Big); err != nil { + return err + } + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "little": + if err := dec.Decode(&s.Little); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewSpanWithinQuery returns a SpanWithinQuery. 
func NewSpanWithinQuery() *SpanWithinQuery { r := &SpanWithinQuery{} diff --git a/typedapi/types/splitprocessor.go b/typedapi/types/splitprocessor.go old mode 100755 new mode 100644 index 737fcaf6ac..e398407d70 --- a/typedapi/types/splitprocessor.go +++ b/typedapi/types/splitprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SplitProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L354-L360 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L354-L360 type SplitProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -36,6 +46,115 @@ type SplitProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *SplitProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "preserve_trailing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveTrailing = &value + case bool: + s.PreserveTrailing = &v + } + + case "separator": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Separator = o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewSplitProcessor returns a SplitProcessor. func NewSplitProcessor() *SplitProcessor { r := &SplitProcessor{} diff --git a/typedapi/types/sql.go b/typedapi/types/sql.go old mode 100755 new mode 100644 index 93b942f543..db0fb013ed --- a/typedapi/types/sql.go +++ b/typedapi/types/sql.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Sql type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L377-L380 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L377-L380 type Sql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -30,6 +40,70 @@ type Sql struct { Queries map[string]XpackQuery `json:"queries"` } +func (s *Sql) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "features": + if s.Features == nil { + s.Features = make(map[string]int, 0) + } + if err := dec.Decode(&s.Features); err != nil { + return err + } + + case "queries": + if s.Queries == nil { + s.Queries = make(map[string]XpackQuery, 0) + } + if err := dec.Decode(&s.Queries); err != nil { + return err + } + + } + } + return nil +} + // NewSql returns a Sql. func NewSql() *Sql { r := &Sql{ diff --git a/typedapi/types/ssl.go b/typedapi/types/ssl.go old mode 100755 new mode 100644 index 602542c4df..25396d107d --- a/typedapi/types/ssl.go +++ b/typedapi/types/ssl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // Ssl type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L382-L385 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L382-L385 type Ssl struct { Http FeatureToggle `json:"http"` Transport FeatureToggle `json:"transport"` diff --git a/typedapi/types/standardanalyzer.go b/typedapi/types/standardanalyzer.go old mode 100755 new mode 100644 index 53bdcda852..2106f2d59e --- a/typedapi/types/standardanalyzer.go +++ b/typedapi/types/standardanalyzer.go @@ -16,19 +16,86 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // StandardAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L95-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L95-L99 type StandardAnalyzer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` } +func (s *StandardAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_token_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTokenLength = &value + case float64: + f := int(v) + s.MaxTokenLength = &f + } + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewStandardAnalyzer returns a StandardAnalyzer. func NewStandardAnalyzer() *StandardAnalyzer { r := &StandardAnalyzer{} diff --git a/typedapi/types/standarddeviationbounds.go b/typedapi/types/standarddeviationbounds.go old mode 100755 new mode 100644 index 6691101fde..20a74d7570 --- a/typedapi/types/standarddeviationbounds.go +++ b/typedapi/types/standarddeviationbounds.go @@ -16,13 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // StandardDeviationBounds type. 
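The StandardAnalyzer hunk above shows the leniency pattern used throughout these decoders: stopwords may be a single string or an array, and numeric settings may arrive as strings. A short sketch with invented settings:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"type":"standard","stopwords":"_english_","max_token_length":"255"}`)

	var a types.StandardAnalyzer
	if err := json.Unmarshal(raw, &a); err != nil {
		panic(err)
	}
	fmt.Println(a.Stopwords, *a.MaxTokenLength) // [_english_] 255
}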
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L259-L266 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L260-L267 type StandardDeviationBounds struct { Lower Float64 `json:"lower,omitempty"` LowerPopulation Float64 `json:"lower_population,omitempty"` @@ -32,6 +39,56 @@ type StandardDeviationBounds struct { UpperSampling Float64 `json:"upper_sampling,omitempty"` } +func (s *StandardDeviationBounds) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "lower": + if err := dec.Decode(&s.Lower); err != nil { + return err + } + + case "lower_population": + if err := dec.Decode(&s.LowerPopulation); err != nil { + return err + } + + case "lower_sampling": + if err := dec.Decode(&s.LowerSampling); err != nil { + return err + } + + case "upper": + if err := dec.Decode(&s.Upper); err != nil { + return err + } + + case "upper_population": + if err := dec.Decode(&s.UpperPopulation); err != nil { + return err + } + + case "upper_sampling": + if err := dec.Decode(&s.UpperSampling); err != nil { + return err + } + + } + } + return nil +} + // NewStandardDeviationBounds returns a StandardDeviationBounds. func NewStandardDeviationBounds() *StandardDeviationBounds { r := &StandardDeviationBounds{} diff --git a/typedapi/types/standarddeviationboundsasstring.go b/typedapi/types/standarddeviationboundsasstring.go old mode 100755 new mode 100644 index 11678db3de..1ddfbacee3 --- a/typedapi/types/standarddeviationboundsasstring.go +++ b/typedapi/types/standarddeviationboundsasstring.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // StandardDeviationBoundsAsString type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L268-L275 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L269-L276 type StandardDeviationBoundsAsString struct { Lower string `json:"lower"` LowerPopulation string `json:"lower_population"` diff --git a/typedapi/types/standardtokenizer.go b/typedapi/types/standardtokenizer.go old mode 100755 new mode 100644 index 27d4601ec6..25c80a6db6 --- a/typedapi/types/standardtokenizer.go +++ b/typedapi/types/standardtokenizer.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // StandardTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L104-L107 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L104-L107 type StandardTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *StandardTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_token_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTokenLength = &value + case float64: + f := int(v) + s.MaxTokenLength = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewStandardTokenizer returns a StandardTokenizer. func NewStandardTokenizer() *StandardTokenizer { r := &StandardTokenizer{} diff --git a/typedapi/types/statistics.go b/typedapi/types/statistics.go old mode 100755 new mode 100644 index f1f013f0d0..e8629decb2 --- a/typedapi/types/statistics.go +++ b/typedapi/types/statistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Statistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/slm/_types/SnapshotLifecycle.ts#L51-L74 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/slm/_types/SnapshotLifecycle.ts#L51-L74 type Statistics struct { Policy *string `json:"policy,omitempty"` RetentionDeletionTime Duration `json:"retention_deletion_time,omitempty"` @@ -36,6 +46,146 @@ type Statistics struct { TotalSnapshotsTaken *int64 `json:"total_snapshots_taken,omitempty"` } +func (s *Statistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "policy": + if err := dec.Decode(&s.Policy); err != nil { + return err + } + + case "retention_deletion_time": + if err := dec.Decode(&s.RetentionDeletionTime); err != nil { + return err + } + + case "retention_deletion_time_millis": + if err := dec.Decode(&s.RetentionDeletionTimeMillis); err != nil { + return err + } + + case "retention_failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RetentionFailed = &value + case float64: + f := int64(v) + s.RetentionFailed = &f + } + + case "retention_runs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RetentionRuns = &value + case float64: + f := int64(v) + s.RetentionRuns = &f + } + + case "retention_timed_out": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RetentionTimedOut = &value + case float64: + f := int64(v) + s.RetentionTimedOut = &f + } + + case "total_snapshot_deletion_failures", "snapshot_deletion_failures": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSnapshotDeletionFailures = &value + case float64: + f := int64(v) + s.TotalSnapshotDeletionFailures = &f + } + + case "total_snapshots_deleted", "snapshots_deleted": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSnapshotsDeleted = &value + case float64: + f := int64(v) + s.TotalSnapshotsDeleted = &f + } + + case "total_snapshots_failed", "snapshots_failed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSnapshotsFailed = &value + case float64: + f := int64(v) + s.TotalSnapshotsFailed = &f + } + + case "total_snapshots_taken", "snapshots_taken": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalSnapshotsTaken = &value + case float64: + f := int64(v) + s.TotalSnapshotsTaken = &f + } + + } + } + return nil +} + // NewStatistics returns a Statistics. 
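The Statistics decoder above also accepts the abbreviated key names (snapshots_taken, snapshots_failed, ...) next to the canonical total_* keys, and parses counters that are serialized as strings. A sketch with made-up policy name and numbers:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"policy":"nightly","snapshots_taken":"42","retention_runs":7}`)

	var st types.Statistics
	if err := json.Unmarshal(raw, &st); err != nil {
		panic(err)
	}
	fmt.Println(*st.Policy, *st.TotalSnapshotsTaken, *st.RetentionRuns) // nightly 42 7
}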
func NewStatistics() *Statistics { r := &Statistics{} diff --git a/typedapi/types/stats.go b/typedapi/types/stats.go old mode 100755 new mode 100644 index 30b893201e..d3bda18493 --- a/typedapi/types/stats.go +++ b/typedapi/types/stats.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // Stats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L30-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L30-L53 type Stats struct { AdaptiveSelection map[string]AdaptiveSelection `json:"adaptive_selection,omitempty"` Attributes map[string]string `json:"attributes,omitempty"` @@ -52,6 +60,189 @@ type Stats struct { TransportAddress *string `json:"transport_address,omitempty"` } +func (s *Stats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "adaptive_selection": + if s.AdaptiveSelection == nil { + s.AdaptiveSelection = make(map[string]AdaptiveSelection, 0) + } + if err := dec.Decode(&s.AdaptiveSelection); err != nil { + return err + } + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "breakers": + if s.Breakers == nil { + s.Breakers = make(map[string]Breaker, 0) + } + if err := dec.Decode(&s.Breakers); err != nil { + return err + } + + case "discovery": + if err := dec.Decode(&s.Discovery); err != nil { + return err + } + + case "fs": + if err := dec.Decode(&s.Fs); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "http": + if err := dec.Decode(&s.Http); err != nil { + return err + } + + case "indexing_pressure": + if err := dec.Decode(&s.IndexingPressure); err != nil { + return err + } + + case "indices": + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "ingest": + if err := dec.Decode(&s.Ingest); err != nil { + return err + } + + case "ip": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Ip = append(s.Ip, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Ip); err != nil { + return err + } + } + + case "jvm": + if err := dec.Decode(&s.Jvm); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "os": + if err := dec.Decode(&s.Os); err != nil { + return err + } + + case "process": + if err := dec.Decode(&s.Process); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "script_cache": + if 
s.ScriptCache == nil { + s.ScriptCache = make(map[string][]ScriptCache, 0) + } + rawMsg := make(map[string]json.RawMessage, 0) + dec.Decode(&rawMsg) + for key, value := range rawMsg { + switch { + case bytes.HasPrefix(value, []byte("\"")), bytes.HasPrefix(value, []byte("{")): + o := NewScriptCache() + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.ScriptCache[key] = append(s.ScriptCache[key], *o) + default: + o := []ScriptCache{} + err := json.NewDecoder(bytes.NewReader(value)).Decode(&o) + if err != nil { + return err + } + s.ScriptCache[key] = o + } + } + + case "thread_pool": + if s.ThreadPool == nil { + s.ThreadPool = make(map[string]ThreadCount, 0) + } + if err := dec.Decode(&s.ThreadPool); err != nil { + return err + } + + case "timestamp": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Timestamp = &value + case float64: + f := int64(v) + s.Timestamp = &f + } + + case "transport": + if err := dec.Decode(&s.Transport); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewStats returns a Stats. func NewStats() *Stats { r := &Stats{ diff --git a/typedapi/types/statsaggregate.go b/typedapi/types/statsaggregate.go old mode 100755 new mode 100644 index ec6fae742e..29cf9b533f --- a/typedapi/types/statsaggregate.go +++ b/typedapi/types/statsaggregate.go @@ -16,28 +16,137 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // StatsAggregate type. 
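In the Stats hunk above, each script_cache entry may be a single object or an array; a lone object is wrapped into a one-element slice, and timestamp is coerced from a string when needed. A sketch using an empty ScriptCache object, since its fields are not part of this hunk:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"timestamp":"1700000000000","script_cache":{"painless":{}}}`)

	var ns types.Stats
	if err := json.Unmarshal(raw, &ns); err != nil {
		panic(err)
	}
	fmt.Println(*ns.Timestamp, len(ns.ScriptCache["painless"])) // 1700000000000 1
}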
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L239-L254 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L240-L255 type StatsAggregate struct { - Avg Float64 `json:"avg,omitempty"` - AvgAsString *string `json:"avg_as_string,omitempty"` - Count int64 `json:"count"` - Max Float64 `json:"max,omitempty"` - MaxAsString *string `json:"max_as_string,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Min Float64 `json:"min,omitempty"` - MinAsString *string `json:"min_as_string,omitempty"` - Sum Float64 `json:"sum"` - SumAsString *string `json:"sum_as_string,omitempty"` + Avg Float64 `json:"avg,omitempty"` + AvgAsString *string `json:"avg_as_string,omitempty"` + Count int64 `json:"count"` + Max Float64 `json:"max,omitempty"` + MaxAsString *string `json:"max_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Min Float64 `json:"min,omitempty"` + MinAsString *string `json:"min_as_string,omitempty"` + Sum Float64 `json:"sum"` + SumAsString *string `json:"sum_as_string,omitempty"` +} + +func (s *StatsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + if err := dec.Decode(&s.Avg); err != nil { + return err + } + + case "avg_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AvgAsString = &o + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "max": + if err := dec.Decode(&s.Max); err != nil { + return err + } + + case "max_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxAsString = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min": + if err := dec.Decode(&s.Min); err != nil { + return err + } + + case "min_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinAsString = &o + + case "sum": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Sum = f + case float64: + f := Float64(v) + s.Sum = f + } + + case "sum_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SumAsString = &o + + } + } + return nil } // NewStatsAggregate returns a StatsAggregate. diff --git a/typedapi/types/statsaggregation.go b/typedapi/types/statsaggregation.go old mode 100755 new mode 100644 index 5155c0a38c..c9c50498a8 --- a/typedapi/types/statsaggregation.go +++ b/typedapi/types/statsaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
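Besides switching Meta to the Metadata type, StatsAggregate now decodes count and sum even when they are serialized as strings. A sketch with invented values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"count":"3","min":1,"max":40,"avg":14.3,"sum":"42.9"}`)

	var agg types.StatsAggregate
	if err := json.Unmarshal(raw, &agg); err != nil {
		panic(err)
	}
	fmt.Println(agg.Count, agg.Sum, agg.Max) // 3 42.9 40
}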
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // StatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L145-L145 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L145-L145 type StatsAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type StatsAggregation struct { Script Script `json:"script,omitempty"` } +func (s *StatsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewStatsAggregation returns a StatsAggregation. func NewStatsAggregation() *StatsAggregation { r := &StatsAggregation{} diff --git a/typedapi/types/statsbucketaggregate.go b/typedapi/types/statsbucketaggregate.go old mode 100755 new mode 100644 index 2f6e3480a1..e3868f19ea --- a/typedapi/types/statsbucketaggregate.go +++ b/typedapi/types/statsbucketaggregate.go @@ -16,28 +16,137 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // StatsBucketAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L256-L257 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L257-L258 type StatsBucketAggregate struct { - Avg Float64 `json:"avg,omitempty"` - AvgAsString *string `json:"avg_as_string,omitempty"` - Count int64 `json:"count"` - Max Float64 `json:"max,omitempty"` - MaxAsString *string `json:"max_as_string,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Min Float64 `json:"min,omitempty"` - MinAsString *string `json:"min_as_string,omitempty"` - Sum Float64 `json:"sum"` - SumAsString *string `json:"sum_as_string,omitempty"` + Avg Float64 `json:"avg,omitempty"` + AvgAsString *string `json:"avg_as_string,omitempty"` + Count int64 `json:"count"` + Max Float64 `json:"max,omitempty"` + MaxAsString *string `json:"max_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Min Float64 `json:"min,omitempty"` + MinAsString *string `json:"min_as_string,omitempty"` + Sum Float64 `json:"sum"` + SumAsString *string `json:"sum_as_string,omitempty"` +} + +func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg": + if err := dec.Decode(&s.Avg); err != nil { + return err + } + + case "avg_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AvgAsString = &o + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "max": + if err := dec.Decode(&s.Max); err != nil { + return err + } + + case "max_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxAsString = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min": + if err := dec.Decode(&s.Min); err != nil { + return err + } + + case "min_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinAsString = &o + + case "sum": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Sum = f + case float64: + f := Float64(v) + s.Sum = f + } + + case "sum_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SumAsString = &o + + } + } + return nil } // NewStatsBucketAggregate returns a StatsBucketAggregate. diff --git a/typedapi/types/statsbucketaggregation.go b/typedapi/types/statsbucketaggregation.go old mode 100755 new mode 100644 index f0f3b61997..436a72337e --- a/typedapi/types/statsbucketaggregation.go +++ b/typedapi/types/statsbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // StatsBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L284-L284 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L284-L284 type StatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. - BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/status.go b/typedapi/types/status.go old mode 100755 new mode 100644 index dfa6d7fcff..a888eaee57 --- a/typedapi/types/status.go +++ b/typedapi/types/status.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Status type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/snapshot/_types/SnapshotStatus.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/snapshot/_types/SnapshotStatus.ts#L26-L35 type Status struct { IncludeGlobalState bool `json:"include_global_state"` Indices map[string]SnapshotIndexStats `json:"indices"` @@ -34,6 +44,87 @@ type Status struct { Uuid string `json:"uuid"` } +func (s *Status) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "include_global_state": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IncludeGlobalState = value + case bool: + s.IncludeGlobalState = v + } + + case "indices": + if s.Indices == nil { + s.Indices = make(map[string]SnapshotIndexStats, 0) + } + if err := dec.Decode(&s.Indices); err != nil { + return err + } + + case "repository": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Repository = o + + case "shards_stats": + if err := dec.Decode(&s.ShardsStats); err != nil { + return err + } + + case "snapshot": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Snapshot = o + + case "state": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = o + + case "stats": + if err := dec.Decode(&s.Stats); err != nil { + return err + } + + case "uuid": + if err := dec.Decode(&s.Uuid); err != nil { + return err + } + + } + } + return nil +} + // NewStatus returns a Status. func NewStatus() *Status { r := &Status{ diff --git a/typedapi/types/stemmeroverridetokenfilter.go b/typedapi/types/stemmeroverridetokenfilter.go old mode 100755 new mode 100644 index af7b9d7cf5..38bcae40b9 --- a/typedapi/types/stemmeroverridetokenfilter.go +++ b/typedapi/types/stemmeroverridetokenfilter.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // StemmerOverrideTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L313-L317 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L314-L318 type StemmerOverrideTokenFilter struct { Rules []string `json:"rules,omitempty"` RulesPath *string `json:"rules_path,omitempty"` @@ -30,6 +38,49 @@ type StemmerOverrideTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *StemmerOverrideTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "rules": + if err := dec.Decode(&s.Rules); err != nil { + return err + } + + case "rules_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RulesPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewStemmerOverrideTokenFilter returns a StemmerOverrideTokenFilter. func NewStemmerOverrideTokenFilter() *StemmerOverrideTokenFilter { r := &StemmerOverrideTokenFilter{} diff --git a/typedapi/types/stemmertokenfilter.go b/typedapi/types/stemmertokenfilter.go old mode 100755 new mode 100644 index f348b4e957..b4db1be2d2 --- a/typedapi/types/stemmertokenfilter.go +++ b/typedapi/types/stemmertokenfilter.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // StemmerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L319-L322 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L320-L324 type StemmerTokenFilter struct { - Language string `json:"language"` + Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *StemmerTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "language", "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Language = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewStemmerTokenFilter returns a StemmerTokenFilter. func NewStemmerTokenFilter() *StemmerTokenFilter { r := &StemmerTokenFilter{} diff --git a/typedapi/types/stepkey.go b/typedapi/types/stepkey.go old mode 100755 new mode 100644 index 7ea5c5c86c..e7ffe1cae2 --- a/typedapi/types/stepkey.go +++ b/typedapi/types/stepkey.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // StepKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ilm/move_to_step/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ilm/move_to_step/types.ts#L20-L24 type StepKey struct { Action string `json:"action"` Name string `json:"name"` diff --git a/typedapi/types/stopanalyzer.go b/typedapi/types/stopanalyzer.go old mode 100755 new mode 100644 index 838b6ead12..51c513f30c --- a/typedapi/types/stopanalyzer.go +++ b/typedapi/types/stopanalyzer.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // StopAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L101-L106 type StopAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` @@ -30,6 +38,60 @@ type StopAnalyzer struct { Version *string `json:"version,omitempty"` } +func (s *StopAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "stopwords_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StopwordsPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewStopAnalyzer returns a StopAnalyzer. func NewStopAnalyzer() *StopAnalyzer { r := &StopAnalyzer{} diff --git a/typedapi/types/stoptokenfilter.go b/typedapi/types/stoptokenfilter.go old mode 100755 new mode 100644 index af64c041b7..be09272480 --- a/typedapi/types/stoptokenfilter.go +++ b/typedapi/types/stoptokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
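A minimal sketch of the stopwords handling added above: StopAnalyzer accepts either a single string or an array for stopwords, and both forms decode into the same []string field. The payloads are invented.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Invented payloads: "stopwords" may be a single string or a list.
	single := []byte(`{"type":"stop","stopwords":"_english_"}`)
	list := []byte(`{"type":"stop","stopwords":["a","the","of"]}`)

	var a, b types.StopAnalyzer
	if err := json.Unmarshal(single, &a); err != nil {
		panic(err)
	}
	if err := json.Unmarshal(list, &b); err != nil {
		panic(err)
	}

	// Both forms end up in the []string field.
	fmt.Println(a.Stopwords, b.Stopwords)
}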
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // StopTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L96-L102 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L97-L103 type StopTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` RemoveTrailing *bool `json:"remove_trailing,omitempty"` @@ -32,6 +42,88 @@ type StopTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "ignore_case": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreCase = &value + case bool: + s.IgnoreCase = &v + } + + case "remove_trailing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.RemoveTrailing = &value + case bool: + s.RemoveTrailing = &v + } + + case "stopwords": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Stopwords = append(s.Stopwords, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Stopwords); err != nil { + return err + } + } + + case "stopwords_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StopwordsPath = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewStopTokenFilter returns a StopTokenFilter. func NewStopTokenFilter() *StopTokenFilter { r := &StopTokenFilter{} diff --git a/typedapi/types/stopwords.go b/typedapi/types/stopwords.go old mode 100755 new mode 100644 index c26ba4cbb1..adb37592e9 --- a/typedapi/types/stopwords.go +++ b/typedapi/types/stopwords.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // StopWords type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/StopWords.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/StopWords.ts#L20-L26 type StopWords []string diff --git a/typedapi/types/storage.go b/typedapi/types/storage.go old mode 100755 new mode 100644 index bd104c0309..20f39a86ff --- a/typedapi/types/storage.go +++ b/typedapi/types/storage.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/storagetype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // Storage type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L497-L506 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L497-L506 type Storage struct { // AllowMmap You can restrict the use of the mmapfs and the related hybridfs store type // via the setting node.store.allow_mmap. @@ -39,6 +47,45 @@ type Storage struct { Type storagetype.StorageType `json:"type"` } +func (s *Storage) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_mmap": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowMmap = &value + case bool: + s.AllowMmap = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil +} + // NewStorage returns a Storage. func NewStorage() *Storage { r := &Storage{} diff --git a/typedapi/types/storedscript.go b/typedapi/types/storedscript.go old mode 100755 new mode 100644 index 72fc26a869..178973ec6b --- a/typedapi/types/storedscript.go +++ b/typedapi/types/storedscript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // StoredScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Scripting.ts#L35-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Scripting.ts#L35-L39 type StoredScript struct { Lang scriptlanguage.ScriptLanguage `json:"lang"` Options map[string]string `json:"options,omitempty"` diff --git a/typedapi/types/storedscriptid.go b/typedapi/types/storedscriptid.go old mode 100755 new mode 100644 index 34bf0f9126..654b7976a3 --- a/typedapi/types/storedscriptid.go +++ b/typedapi/types/storedscriptid.go @@ -16,22 +16,59 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // StoredScriptId type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Scripting.ts#L52-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Scripting.ts#L52-L54 type StoredScriptId struct { Id string `json:"id"` Params map[string]json.RawMessage `json:"params,omitempty"` } +func (s *StoredScriptId) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + } + } + return nil +} + // NewStoredScriptId returns a StoredScriptId. func NewStoredScriptId() *StoredScriptId { r := &StoredScriptId{ diff --git a/typedapi/types/storestats.go b/typedapi/types/storestats.go old mode 100755 new mode 100644 index 9551da7ca5..90a13865f2 --- a/typedapi/types/storestats.go +++ b/typedapi/types/storestats.go @@ -16,20 +16,110 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // StoreStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L233-L240 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L233-L240 type StoreStats struct { Reserved ByteSize `json:"reserved,omitempty"` - ReservedInBytes int `json:"reserved_in_bytes"` + ReservedInBytes int64 `json:"reserved_in_bytes"` Size ByteSize `json:"size,omitempty"` - SizeInBytes int `json:"size_in_bytes"` + SizeInBytes int64 `json:"size_in_bytes"` TotalDataSetSize ByteSize `json:"total_data_set_size,omitempty"` - TotalDataSetSizeInBytes *int `json:"total_data_set_size_in_bytes,omitempty"` + TotalDataSetSizeInBytes *int64 `json:"total_data_set_size_in_bytes,omitempty"` +} + +func (s *StoreStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "reserved": + if err := dec.Decode(&s.Reserved); err != nil { + return err + } + + case "reserved_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ReservedInBytes = value + case float64: + f := int64(v) + s.ReservedInBytes = f + } + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + case "total_data_set_size": + if err := dec.Decode(&s.TotalDataSetSize); err != nil { + return err + } + + case "total_data_set_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalDataSetSizeInBytes = &value + case float64: + f := int64(v) + s.TotalDataSetSizeInBytes = &f + } + + } + } + return nil } // NewStoreStats returns a StoreStats. diff --git a/typedapi/core/ping/response.go b/typedapi/types/stringifiedboolean.go old mode 100755 new mode 100644 similarity index 65% rename from typedapi/core/ping/response.go rename to typedapi/types/stringifiedboolean.go index ac9c483266..9b679c824c --- a/typedapi/core/ping/response.go +++ b/typedapi/types/stringifiedboolean.go @@ -16,19 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
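A minimal sketch of the StoreStats change above, where the *_in_bytes counters widen from int to int64 and tolerate string-encoded numbers; callers that previously held these values in int need int64 now. The payload is invented.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Invented payload: a 5 GiB store reported as a string, which would not
	// fit in a 32-bit int; the widened int64 fields accept both encodings.
	payload := []byte(`{"size_in_bytes":"5368709120","reserved_in_bytes":0}`)

	var stats types.StoreStats
	if err := json.Unmarshal(payload, &stats); err != nil {
		panic(err)
	}

	var size int64 = stats.SizeInBytes // was int before this change
	fmt.Println(size)
}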
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 -package ping +package types -// Response holds the response body struct for the package ping +// Stringifiedboolean holds the union for the following types: // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/ping/PingResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} +// bool +// string +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_spec_utils/Stringified.ts#L20-L27 +type Stringifiedboolean interface{} diff --git a/typedapi/types/stringifiedepochtimeunitmillis.go b/typedapi/types/stringifiedepochtimeunitmillis.go old mode 100755 new mode 100644 index 066cbe229e..c9af94732a --- a/typedapi/types/stringifiedepochtimeunitmillis.go +++ b/typedapi/types/stringifiedepochtimeunitmillis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_spec_utils/Stringified.ts#L20-L27 type StringifiedEpochTimeUnitMillis interface{} diff --git a/typedapi/types/stringifiedepochtimeunitseconds.go b/typedapi/types/stringifiedepochtimeunitseconds.go old mode 100755 new mode 100644 index 1610988717..02166d9753 --- a/typedapi/types/stringifiedepochtimeunitseconds.go +++ b/typedapi/types/stringifiedepochtimeunitseconds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_spec_utils/Stringified.ts#L20-L27 type StringifiedEpochTimeUnitSeconds interface{} diff --git a/typedapi/core/exists/response.go b/typedapi/types/stringifiedinteger.go old mode 100755 new mode 100644 similarity index 64% rename from typedapi/core/exists/response.go rename to typedapi/types/stringifiedinteger.go index ad26f833e2..fd233ae973 --- a/typedapi/core/exists/response.go +++ b/typedapi/types/stringifiedinteger.go @@ -16,19 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
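The Stringified* aliases above are plain interface{} unions, so a caller sees either a string or a decoded JSON number. A minimal sketch of one way to coerce such a value, assuming only the union types shown in this diff; the helper name asInt64 is hypothetical:

package main

import (
	"fmt"
	"strconv"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// asInt64 is a hypothetical helper that coerces a Stringified epoch value,
// which the generated code models as interface{}, into an int64. It assumes
// the server sent either a JSON string or a number (decoded as float64).
func asInt64(v types.StringifiedEpochTimeUnitMillis) (int64, error) {
	switch t := v.(type) {
	case string:
		return strconv.ParseInt(t, 10, 64)
	case float64:
		return int64(t), nil
	case int64:
		return t, nil
	default:
		return 0, fmt.Errorf("unexpected type %T for stringified value", v)
	}
}

func main() {
	// Invented value: milliseconds since the epoch, sent as a string.
	ms, err := asInt64("1672531200000")
	if err != nil {
		panic(err)
	}
	fmt.Println(ms)
}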
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 -package exists +package types -// Response holds the response body struct for the package exists +// Stringifiedinteger holds the union for the following types: // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/exists/DocumentExistsResponse.ts#L22-L24 - -type Response struct { -} - -// NewResponse returns a Response -func NewResponse() *Response { - r := &Response{} - return r -} +// int +// string +// +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_spec_utils/Stringified.ts#L20-L27 +type Stringifiedinteger interface{} diff --git a/typedapi/types/stringifiedversionnumber.go b/typedapi/types/stringifiedversionnumber.go old mode 100755 new mode 100644 index c93219f18b..a366cb770b --- a/typedapi/types/stringifiedversionnumber.go +++ b/typedapi/types/stringifiedversionnumber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_spec_utils/Stringified.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_spec_utils/Stringified.ts#L20-L27 type StringifiedVersionNumber interface{} diff --git a/typedapi/types/stringraretermsaggregate.go b/typedapi/types/stringraretermsaggregate.go old mode 100755 new mode 100644 index 1de49b0c9c..6837439ff7 --- a/typedapi/types/stringraretermsaggregate.go +++ b/typedapi/types/stringraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // StringRareTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L442-L446 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L443-L447 type StringRareTermsAggregate struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *StringRareTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *StringRareTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]StringRareTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []StringRareTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/stringraretermsbucket.go b/typedapi/types/stringraretermsbucket.go old mode 100755 new mode 100644 index 9715f48dcb..e705ba5223 --- a/typedapi/types/stringraretermsbucket.go +++ b/typedapi/types/stringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // StringRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L448-L450 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L449-L451 type StringRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -42,6 +44,7 @@ type StringRareTermsBucket struct { } func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,456 +58,540 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := 
NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": 
- o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { + case "key": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Key = o - case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + 
return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + 
case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := 
dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } } } @@ -531,6 +618,7 @@ func (s StringRareTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/stringstatsaggregate.go b/typedapi/types/stringstatsaggregate.go old mode 100755 new mode 100644 index 189f3fedd8..96536c5b7b --- a/typedapi/types/stringstatsaggregate.go +++ b/typedapi/types/stringstatsaggregate.go @@ -16,28 +16,123 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // StringStatsAggregate type. 
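The switch that closes just above (and the near-identical one added to StringTermsBucket further down, where the typed keys are read directly off the bucket object rather than under an "aggregations" wrapper) dispatches sub-aggregation results whose JSON keys carry the aggregate type before a "#", for example "sterms#by_tag". The standalone sketch below shows only that dispatch shape; the two-entry registry, the stringTermsAggregate stand-in and decodeAggs are invented for illustration, while the generated switch enumerates every aggregate type.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// stringTermsAggregate is a cut-down stand-in for the generated type.
type stringTermsAggregate struct {
	Buckets json.RawMessage `json:"buckets"`
}

// decodeAggs splits each "<type>#<name>" key: the prefix picks the concrete
// Go type, the suffix becomes the map key, mirroring the generated switch.
func decodeAggs(raw map[string]json.RawMessage) (map[string]interface{}, error) {
	out := make(map[string]interface{})
	for key, msg := range raw {
		elems := strings.Split(key, "#")
		if len(elems) != 2 {
			out[key] = msg // not a typed aggregation key; keep it as-is
			continue
		}
		switch elems[0] {
		case "sterms":
			var o stringTermsAggregate
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, err
			}
			out[elems[1]] = o
		default:
			var o map[string]interface{} // unknown type: fall back to a plain map
			if err := json.Unmarshal(msg, &o); err != nil {
				return nil, err
			}
			out[elems[1]] = o
		}
	}
	return out, nil
}

func main() {
	raw := map[string]json.RawMessage{
		"sterms#by_tag": json.RawMessage(`{"buckets":[]}`),
	}
	aggs, err := decodeAggs(raw)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", aggs["by_tag"]) // main.stringTermsAggregate
}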
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L684-L695 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L693-L704 type StringStatsAggregate struct { - AvgLength Float64 `json:"avg_length,omitempty"` - AvgLengthAsString *string `json:"avg_length_as_string,omitempty"` - Count int64 `json:"count"` - Distribution map[string]Float64 `json:"distribution,omitempty"` - Entropy Float64 `json:"entropy,omitempty"` - MaxLength int `json:"max_length,omitempty"` - MaxLengthAsString *string `json:"max_length_as_string,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - MinLength int `json:"min_length,omitempty"` - MinLengthAsString *string `json:"min_length_as_string,omitempty"` + AvgLength Float64 `json:"avg_length,omitempty"` + AvgLengthAsString *string `json:"avg_length_as_string,omitempty"` + Count int64 `json:"count"` + Distribution map[string]Float64 `json:"distribution,omitempty"` + Entropy Float64 `json:"entropy,omitempty"` + MaxLength int `json:"max_length,omitempty"` + MaxLengthAsString *string `json:"max_length_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + MinLength int `json:"min_length,omitempty"` + MinLengthAsString *string `json:"min_length_as_string,omitempty"` +} + +func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "avg_length": + if err := dec.Decode(&s.AvgLength); err != nil { + return err + } + + case "avg_length_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AvgLengthAsString = &o + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + case "distribution": + if err := dec.Decode(&s.Distribution); err != nil { + return err + } + + case "entropy": + if err := dec.Decode(&s.Entropy); err != nil { + return err + } + + case "max_length": + if err := dec.Decode(&s.MaxLength); err != nil { + return err + } + + case "max_length_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxLengthAsString = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "min_length": + if err := dec.Decode(&s.MinLength); err != nil { + return err + } + + case "min_length_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MinLengthAsString = &o + + } + } + return nil } // NewStringStatsAggregate returns a StringStatsAggregate. diff --git a/typedapi/types/stringstatsaggregation.go b/typedapi/types/stringstatsaggregation.go old mode 100755 new mode 100644 index 10c9f37f1e..1c86531422 --- a/typedapi/types/stringstatsaggregation.go +++ b/typedapi/types/stringstatsaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // StringStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L147-L149 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L147-L149 type StringStatsAggregation struct { Field *string `json:"field,omitempty"` Missing Missing `json:"missing,omitempty"` @@ -30,6 +40,55 @@ type StringStatsAggregation struct { ShowDistribution *bool `json:"show_distribution,omitempty"` } +func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "show_distribution": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ShowDistribution = &value + case bool: + s.ShowDistribution = &v + } + + } + } + return nil +} + // NewStringStatsAggregation returns a StringStatsAggregation. func NewStringStatsAggregation() *StringStatsAggregation { r := &StringStatsAggregation{} diff --git a/typedapi/types/stringtermsaggregate.go b/typedapi/types/stringtermsaggregate.go old mode 100755 new mode 100644 index 6481d557c3..aca0a87d72 --- a/typedapi/types/stringtermsaggregate.go +++ b/typedapi/types/stringtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // StringTermsAggregate type. 
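Several of the decoders added in this change (the count handling in StringStatsAggregate above, and show_distribution, doc_count, transpositions and similar fields below) accept a value either in its native JSON form or as a string. A minimal sketch of that pattern follows; docCountHolder is an invented type, and the whole object is unmarshalled into a map here for brevity, whereas the generated code does the equivalent per field inside its token loop.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type docCountHolder struct {
	DocCount int64
}

func (h *docCountHolder) UnmarshalJSON(data []byte) error {
	var raw map[string]interface{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["doc_count"].(type) {
	case string: // quoted form, e.g. "42"
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return err
		}
		h.DocCount = n
	case float64: // plain JSON number
		h.DocCount = int64(v)
	}
	return nil
}

func main() {
	var a, b docCountHolder
	_ = json.Unmarshal([]byte(`{"doc_count": 42}`), &a)
	_ = json.Unmarshal([]byte(`{"doc_count": "42"}`), &b)
	fmt.Println(a.DocCount, b.DocCount) // 42 42
}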
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L383-L388 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L384-L389 type StringTermsAggregate struct { - Buckets BucketsStringTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsStringTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]StringTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []StringTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/stringtermsbucket.go b/typedapi/types/stringtermsbucket.go old mode 100755 new mode 100644 index bab154979a..9573472725 --- a/typedapi/types/stringtermsbucket.go +++ b/typedapi/types/stringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // StringTermsBucket type. 
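The buckets handling in StringTermsAggregate above peeks at the first byte of the raw value to decide whether the buckets arrived keyed (a JSON object) or unkeyed (a JSON array). A self-contained sketch of that branch, with a simplified bucket type standing in for StringTermsBucket:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type bucket struct {
	Key      string `json:"key"`
	DocCount int64  `json:"doc_count"`
}

func decodeBuckets(raw json.RawMessage) (interface{}, error) {
	dec := json.NewDecoder(bytes.NewReader(raw))
	switch raw[0] {
	case '{': // keyed buckets: {"name": {...}, ...}
		m := make(map[string]bucket)
		if err := dec.Decode(&m); err != nil {
			return nil, err
		}
		return m, nil
	case '[': // unkeyed buckets: [{...}, ...]
		var s []bucket
		if err := dec.Decode(&s); err != nil {
			return nil, err
		}
		return s, nil
	}
	return nil, fmt.Errorf("unexpected buckets payload")
}

func main() {
	keyed, _ := decodeBuckets(json.RawMessage(`{"a":{"key":"a","doc_count":1}}`))
	listed, _ := decodeBuckets(json.RawMessage(`[{"key":"a","doc_count":1}]`))
	fmt.Printf("%T %T\n", keyed, listed) // map[string]main.bucket []main.bucket
}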
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L394-L396 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L395-L397 type StringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -43,6 +45,7 @@ type StringTermsBucket struct { } func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -56,456 +59,34 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != 
nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - 
if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "doc_count_error": - if err := dec.Decode(&s.DocCountError); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountError = &value + case float64: + f := int64(v) + s.DocCountError = &f } case "key": @@ -513,6 +94,519 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := 
NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if 
err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -537,6 +631,7 @@ func (s StringTermsBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/stupidbackoffsmoothingmodel.go b/typedapi/types/stupidbackoffsmoothingmodel.go old mode 100755 new mode 100644 index ef9803fe07..53dabf800f --- a/typedapi/types/stupidbackoffsmoothingmodel.go +++ b/typedapi/types/stupidbackoffsmoothingmodel.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // StupidBackoffSmoothingModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L233-L235 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L237-L239 type StupidBackoffSmoothingModel struct { Discount Float64 `json:"discount"` } +func (s *StupidBackoffSmoothingModel) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "discount": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Discount = f + case float64: + f := Float64(v) + s.Discount = f + } + + } + } + return nil +} + // NewStupidBackoffSmoothingModel returns a StupidBackoffSmoothingModel. 
func NewStupidBackoffSmoothingModel() *StupidBackoffSmoothingModel { r := &StupidBackoffSmoothingModel{} diff --git a/typedapi/types/suggest.go b/typedapi/types/suggest.go old mode 100755 new mode 100644 index 434eea29f4..0234502c9b --- a/typedapi/types/suggest.go +++ b/typedapi/types/suggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ package types // PhraseSuggest // TermSuggest // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L34-L40 type Suggest interface{} diff --git a/typedapi/types/suggestcontext.go b/typedapi/types/suggestcontext.go old mode 100755 new mode 100644 index fef279c10d..bc45ba535c --- a/typedapi/types/suggestcontext.go +++ b/typedapi/types/suggestcontext.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SuggestContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L37-L42 type SuggestContext struct { Name string `json:"name"` Path *string `json:"path,omitempty"` @@ -30,6 +38,52 @@ type SuggestContext struct { Type string `json:"type"` } +func (s *SuggestContext) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "path": + if err := dec.Decode(&s.Path); err != nil { + return err + } + + case "precision": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Precision = o + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewSuggestContext returns a SuggestContext. func NewSuggestContext() *SuggestContext { r := &SuggestContext{} diff --git a/typedapi/types/suggester.go b/typedapi/types/suggester.go old mode 100755 new mode 100644 index 43823eadce..7801be0263 --- a/typedapi/types/suggester.go +++ b/typedapi/types/suggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,7 +27,7 @@ import ( // Suggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L101-L104 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L101-L104 type Suggester struct { Suggesters map[string]FieldSuggester `json:"-"` // Text Global suggest text, to avoid repetition when the same text is used in @@ -54,6 +54,7 @@ func (s Suggester) MarshalJSON() ([]byte, error) { for key, value := range s.Suggesters { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Suggesters") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/suggestfuzziness.go b/typedapi/types/suggestfuzziness.go old mode 100755 new mode 100644 index b51ffe321a..1c0efd98e6 --- a/typedapi/types/suggestfuzziness.go +++ b/typedapi/types/suggestfuzziness.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // SuggestFuzziness type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L138-L144 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L142-L148 type SuggestFuzziness struct { Fuzziness Fuzziness `json:"fuzziness,omitempty"` MinLength *int `json:"min_length,omitempty"` @@ -31,6 +41,91 @@ type SuggestFuzziness struct { UnicodeAware *bool `json:"unicode_aware,omitempty"` } +func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fuzziness": + if err := dec.Decode(&s.Fuzziness); err != nil { + return err + } + + case "min_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinLength = &value + case float64: + f := int(v) + s.MinLength = &f + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "transpositions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Transpositions = &value + case bool: + s.Transpositions = &v + } + + case "unicode_aware": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.UnicodeAware = &value + case bool: + s.UnicodeAware = &v + } + + } + } + return 
nil +} + // NewSuggestFuzziness returns a SuggestFuzziness. func NewSuggestFuzziness() *SuggestFuzziness { r := &SuggestFuzziness{} diff --git a/typedapi/types/sumaggregate.go b/typedapi/types/sumaggregate.go old mode 100755 new mode 100644 index 6c4cb18053..b427570112 --- a/typedapi/types/sumaggregate.go +++ b/typedapi/types/sumaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // SumAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L202-L206 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L203-L207 type SumAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -36,6 +40,44 @@ type SumAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *SumAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewSumAggregate returns a SumAggregate. func NewSumAggregate() *SumAggregate { r := &SumAggregate{} diff --git a/typedapi/types/sumaggregation.go b/typedapi/types/sumaggregation.go old mode 100755 new mode 100644 index 4e9a8ffb1e..b92e126d10 --- a/typedapi/types/sumaggregation.go +++ b/typedapi/types/sumaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // SumAggregation type. 
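All of the UnmarshalJSON methods introduced by this change share the same skeleton: stream tokens from a json.Decoder until io.EOF and decode each recognised field in place when its name token is seen. A compact sketch of that loop with an invented example type:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

type example struct {
	Name string
}

func (e *example) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break // end of the payload
			}
			return err
		}
		switch t {
		case "name":
			// Decode consumes the value that follows the field-name token.
			if err := dec.Decode(&e.Name); err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	var e example
	_ = json.Unmarshal([]byte(`{"name":"demo"}`), &e)
	fmt.Println(e.Name) // demo
}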
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L151-L151 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L151-L151 type SumAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type SumAggregation struct { Script Script `json:"script,omitempty"` } +func (s *SumAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewSumAggregation returns a SumAggregation. func NewSumAggregation() *SumAggregation { r := &SumAggregation{} diff --git a/typedapi/types/sumbucketaggregation.go b/typedapi/types/sumbucketaggregation.go old mode 100755 new mode 100644 index 9263b874f0..8b2c1241b1 --- a/typedapi/types/sumbucketaggregation.go +++ b/typedapi/types/sumbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -32,17 +32,18 @@ import ( // SumBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/pipeline.ts#L286-L286 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/pipeline.ts#L286-L286 type SumBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
- BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Format *string `json:"format,omitempty"` - GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + BucketsPath BucketsPath `json:"buckets_path,omitempty"` + Format *string `json:"format,omitempty"` + GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` } func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,9 +63,12 @@ func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "gap_policy": if err := dec.Decode(&s.GapPolicy); err != nil { @@ -77,9 +81,12 @@ func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o } } diff --git a/typedapi/types/summary.go b/typedapi/types/summary.go old mode 100755 new mode 100644 index 6e93456024..20a0ead38d --- a/typedapi/types/summary.go +++ b/typedapi/types/summary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // Summary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/enrich/_types/Policy.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/enrich/_types/Policy.ts#L23-L25 type Summary struct { Config map[policytype.PolicyType]EnrichPolicy `json:"config"` } diff --git a/typedapi/types/synccontainer.go b/typedapi/types/synccontainer.go old mode 100755 new mode 100644 index e1fc30fdce..ff46fe521c --- a/typedapi/types/synccontainer.go +++ b/typedapi/types/synccontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // SyncContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L167-L173 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L167-L173 type SyncContainer struct { // Time Specifies that the transform uses a time field to synchronize the source and // destination indices. diff --git a/typedapi/types/synonymgraphtokenfilter.go b/typedapi/types/synonymgraphtokenfilter.go old mode 100755 new mode 100644 index a898f0fd0a..68a5ed1742 --- a/typedapi/types/synonymgraphtokenfilter.go +++ b/typedapi/types/synonymgraphtokenfilter.go @@ -16,17 +16,25 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/synonymformat" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SynonymGraphTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L109-L118 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L110-L119 type SynonymGraphTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` @@ -39,6 +47,104 @@ type SynonymGraphTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expand": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Expand = &value + case bool: + s.Expand = &v + } + + case "format": + if err := dec.Decode(&s.Format); err != nil { + return err + } + + case "lenient": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lenient = &value + case bool: + s.Lenient = &v + } + + case "synonyms": + if err := dec.Decode(&s.Synonyms); err != nil { + return err + } + + case "synonyms_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SynonymsPath = &o + + case "tokenizer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tokenizer = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "updateable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Updateable = &value + case bool: + s.Updateable = &v + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSynonymGraphTokenFilter returns a SynonymGraphTokenFilter. func NewSynonymGraphTokenFilter() *SynonymGraphTokenFilter { r := &SynonymGraphTokenFilter{} diff --git a/typedapi/types/synonymtokenfilter.go b/typedapi/types/synonymtokenfilter.go old mode 100755 new mode 100644 index e27cfe708b..00ee549d27 --- a/typedapi/types/synonymtokenfilter.go +++ b/typedapi/types/synonymtokenfilter.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/synonymformat" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // SynonymTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L120-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L121-L130 type SynonymTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` @@ -39,6 +47,104 @@ type SynonymTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "expand": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Expand = &value + case bool: + s.Expand = &v + } + + case "format": + if err := dec.Decode(&s.Format); err != nil { + return err + } + + case "lenient": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Lenient = &value + case bool: + s.Lenient = &v + } + + case "synonyms": + if err := dec.Decode(&s.Synonyms); err != nil { + return err + } + + case "synonyms_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SynonymsPath = &o + + case "tokenizer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tokenizer = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "updateable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Updateable = &value + case bool: + s.Updateable = &v + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewSynonymTokenFilter returns a SynonymTokenFilter. func NewSynonymTokenFilter() *SynonymTokenFilter { r := &SynonymTokenFilter{} diff --git a/typedapi/types/targetmeanencodingpreprocessor.go b/typedapi/types/targetmeanencodingpreprocessor.go old mode 100755 new mode 100644 index 026768dcf3..7046ff2e15 --- a/typedapi/types/targetmeanencodingpreprocessor.go +++ b/typedapi/types/targetmeanencodingpreprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TargetMeanEncodingPreprocessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L49-L54 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L49-L54 type TargetMeanEncodingPreprocessor struct { DefaultValue Float64 `json:"default_value"` FeatureName string `json:"feature_name"` @@ -30,6 +40,66 @@ type TargetMeanEncodingPreprocessor struct { TargetMap map[string]Float64 `json:"target_map"` } +func (s *TargetMeanEncodingPreprocessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "default_value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.DefaultValue = f + case float64: + f := Float64(v) + s.DefaultValue = f + } + + case "feature_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureName = o + + case "field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Field = o + + case "target_map": + if s.TargetMap == nil { + s.TargetMap = make(map[string]Float64, 0) + } + if err := dec.Decode(&s.TargetMap); err != nil { + return err + } + + } + } + return nil +} + // NewTargetMeanEncodingPreprocessor returns a TargetMeanEncodingPreprocessor. func NewTargetMeanEncodingPreprocessor() *TargetMeanEncodingPreprocessor { r := &TargetMeanEncodingPreprocessor{ diff --git a/typedapi/types/taskfailure.go b/typedapi/types/taskfailure.go old mode 100755 new mode 100644 index a7fe74cb37..54fa3b1540 --- a/typedapi/types/taskfailure.go +++ b/typedapi/types/taskfailure.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TaskFailure type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Errors.ts#L66-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Errors.ts#L66-L71 type TaskFailure struct { NodeId string `json:"node_id"` Reason ErrorCause `json:"reason"` @@ -30,6 +40,59 @@ type TaskFailure struct { TaskId int64 `json:"task_id"` } +func (s *TaskFailure) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "reason": + if err := dec.Decode(&s.Reason); err != nil { + return err + } + + case "status": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Status = o + + case "task_id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TaskId = value + case float64: + f := int64(v) + s.TaskId = f + } + + } + } + return nil +} + // NewTaskFailure returns a TaskFailure. func NewTaskFailure() *TaskFailure { r := &TaskFailure{} diff --git a/typedapi/types/taskid.go b/typedapi/types/taskid.go old mode 100755 new mode 100644 index 724a3718a3..93eb9a0496 --- a/typedapi/types/taskid.go +++ b/typedapi/types/taskid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // int // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L113-L113 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L113-L113 type TaskId interface{} diff --git a/typedapi/types/taskinfo.go b/typedapi/types/taskinfo.go old mode 100755 new mode 100644 index 76e2825cde..c8dc93d8ef --- a/typedapi/types/taskinfo.go +++ b/typedapi/types/taskinfo.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TaskInfo type. 
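// NOTE (illustrative sketch, not part of the generated sources): the
// UnmarshalJSON added below coerces loosely typed fields, so booleans and
// numbers that arrive as JSON strings still decode. The payload values are
// invented for illustration; callers import typedapi/types as usual and need
// encoding/json, fmt and log:
//
//	var ti types.TaskInfo
//	raw := []byte(`{"cancellable":"true","id":"42","action":"indices:data/write/bulk"}`)
//	if err := json.Unmarshal(raw, &ti); err != nil { // dispatches to the decoder below
//		log.Fatal(err)
//	}
//	fmt.Println(ti.Cancellable, ti.Id) // true 42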
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/_types/TaskInfo.ts#L32-L46 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/_types/TaskInfo.ts#L32-L46 type TaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -39,6 +49,131 @@ type TaskInfo struct { Type string `json:"type"` } +func (s *TaskInfo) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Action = o + + case "cancellable": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Cancellable = value + case bool: + s.Cancellable = v + } + + case "cancelled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Cancelled = &value + case bool: + s.Cancelled = &v + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "headers": + if s.Headers == nil { + s.Headers = make(map[string]string, 0) + } + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "id": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Id = value + case float64: + f := int64(v) + s.Id = f + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "parent_task_id": + if err := dec.Decode(&s.ParentTaskId); err != nil { + return err + } + + case "running_time": + if err := dec.Decode(&s.RunningTime); err != nil { + return err + } + + case "running_time_in_nanos": + if err := dec.Decode(&s.RunningTimeInNanos); err != nil { + return err + } + + case "start_time_in_millis": + if err := dec.Decode(&s.StartTimeInMillis); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewTaskInfo returns a TaskInfo. func NewTaskInfo() *TaskInfo { r := &TaskInfo{ diff --git a/typedapi/types/taskinfos.go b/typedapi/types/taskinfos.go old mode 100755 new mode 100644 index a6bc67b314..78255c3c92 --- a/typedapi/types/taskinfos.go +++ b/typedapi/types/taskinfos.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // []TaskInfo // map[string]ParentTaskInfo // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/_types/TaskListResponseBase.ts#L40-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/_types/TaskListResponseBase.ts#L40-L43 type TaskInfos interface{} diff --git a/typedapi/types/tasksrecord.go b/typedapi/types/tasksrecord.go old mode 100755 new mode 100644 index 9560dbaccf..f05d179c45 --- a/typedapi/types/tasksrecord.go +++ b/typedapi/types/tasksrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TasksRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/tasks/types.ts#L22-L101 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/tasks/types.ts#L22-L101 type TasksRecord struct { // Action task action Action *string `json:"action,omitempty"` @@ -58,6 +66,142 @@ type TasksRecord struct { XOpaqueId *string `json:"x_opaque_id,omitempty"` } +func (s *TasksRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action", "ac": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Action = &o + + case "description", "desc": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "ip", "i": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Ip = &o + + case "node", "n": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = &o + + case "node_id", "ni": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "parent_task_id", "pti": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ParentTaskId = &o + + case "port", "po": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Port = &o + + case "running_time", "time": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RunningTime = &o + + case "running_time_ns": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RunningTimeNs = &o + + case "start_time", "start": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.StartTime = &o + + case "task_id", "ti": + if 
err := dec.Decode(&s.TaskId); err != nil { + return err + } + + case "timestamp", "ts", "hms", "hhmmss": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Timestamp = &o + + case "type", "ty": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + case "x_opaque_id", "x": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.XOpaqueId = &o + + } + } + return nil +} + // NewTasksRecord returns a TasksRecord. func NewTasksRecord() *TasksRecord { r := &TasksRecord{} diff --git a/typedapi/types/taskstatus.go b/typedapi/types/taskstatus.go old mode 100755 new mode 100644 index f1085c3558..6956e3c6dd --- a/typedapi/types/taskstatus.go +++ b/typedapi/types/taskstatus.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TaskStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/tasks/_types/TaskStatus.ts#L24-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/tasks/_types/TaskStatus.ts#L24-L42 type TaskStatus struct { Batches int64 `json:"batches"` Canceled *string `json:"canceled,omitempty"` @@ -43,6 +53,204 @@ type TaskStatus struct { VersionConflicts int64 `json:"version_conflicts"` } +func (s *TaskStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "batches": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Batches = value + case float64: + f := int64(v) + s.Batches = f + } + + case "canceled": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Canceled = &o + + case "created": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Created = value + case float64: + f := int64(v) + s.Created = f + } + + case "deleted": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Deleted = value + case float64: + f := int64(v) + s.Deleted = f + } + + case "failures": + if err := dec.Decode(&s.Failures); err != nil { + return err + } + + case "noops": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Noops = value + case float64: + f := int64(v) + s.Noops = f + } + + case "requests_per_second": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + 
f := float32(value) + s.RequestsPerSecond = f + case float64: + f := float32(v) + s.RequestsPerSecond = f + } + + case "retries": + if err := dec.Decode(&s.Retries); err != nil { + return err + } + + case "throttled": + if err := dec.Decode(&s.Throttled); err != nil { + return err + } + + case "throttled_millis": + if err := dec.Decode(&s.ThrottledMillis); err != nil { + return err + } + + case "throttled_until": + if err := dec.Decode(&s.ThrottledUntil); err != nil { + return err + } + + case "throttled_until_millis": + if err := dec.Decode(&s.ThrottledUntilMillis); err != nil { + return err + } + + case "timed_out": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimedOut = &value + case bool: + s.TimedOut = &v + } + + case "took": + if err := dec.Decode(&s.Took); err != nil { + return err + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "updated": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Updated = value + case float64: + f := int64(v) + s.Updated = f + } + + case "version_conflicts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.VersionConflicts = value + case float64: + f := int64(v) + s.VersionConflicts = f + } + + } + } + return nil +} + // NewTaskStatus returns a TaskStatus. func NewTaskStatus() *TaskStatus { r := &TaskStatus{} diff --git a/typedapi/types/tdigest.go b/typedapi/types/tdigest.go old mode 100755 new mode 100644 index 0c5d26110b..b31c77ebbf --- a/typedapi/types/tdigest.go +++ b/typedapi/types/tdigest.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TDigest type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L123-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L123-L125 type TDigest struct { Compression *int `json:"compression,omitempty"` } +func (s *TDigest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compression": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Compression = &value + case float64: + f := int(v) + s.Compression = &f + } + + } + } + return nil +} + // NewTDigest returns a TDigest. 
func NewTDigest() *TDigest { r := &TDigest{} diff --git a/typedapi/types/tdigestpercentileranksaggregate.go b/typedapi/types/tdigestpercentileranksaggregate.go old mode 100755 new mode 100644 index 6152151aa5..6210f696e7 --- a/typedapi/types/tdigestpercentileranksaggregate.go +++ b/typedapi/types/tdigestpercentileranksaggregate.go @@ -16,20 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // TDigestPercentileRanksAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L174-L175 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L175-L176 type TDigestPercentileRanksAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Values Percentiles `json:"values"` + Meta Metadata `json:"meta,omitempty"` + Values Percentiles `json:"values"` +} + +func (s *TDigestPercentileRanksAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "values": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(KeyedPercentiles, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + case '[': + o := []ArrayPercentilesItem{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + } + + } + } + return nil } // NewTDigestPercentileRanksAggregate returns a TDigestPercentileRanksAggregate. diff --git a/typedapi/types/tdigestpercentilesaggregate.go b/typedapi/types/tdigestpercentilesaggregate.go old mode 100755 new mode 100644 index 36fe2117b3..dd5e9a8a57 --- a/typedapi/types/tdigestpercentilesaggregate.go +++ b/typedapi/types/tdigestpercentilesaggregate.go @@ -16,20 +16,70 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // TDigestPercentilesAggregate type. 
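// NOTE (illustrative sketch, not part of the generated sources): the decoder
// below resolves the Percentiles union while unmarshalling, so "values"
// becomes KeyedPercentiles for the keyed response form and
// []ArrayPercentilesItem for the array form. A type switch is an assumed way
// to consume it:
//
//	switch v := agg.Values.(type) { // agg is a decoded TDigestPercentilesAggregate
//	case types.KeyedPercentiles:
//		fmt.Println("keyed form, entries:", len(v))
//	case []types.ArrayPercentilesItem:
//		fmt.Println("array form, entries:", len(v))
//	}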
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L171-L172 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L172-L173 type TDigestPercentilesAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Values Percentiles `json:"values"` + Meta Metadata `json:"meta,omitempty"` + Values Percentiles `json:"values"` +} + +func (s *TDigestPercentilesAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "values": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + localDec := json.NewDecoder(source) + switch rawMsg[0] { + case '{': + o := make(KeyedPercentiles, 0) + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + case '[': + o := []ArrayPercentilesItem{} + if err := localDec.Decode(&o); err != nil { + return err + } + s.Values = o + } + + } + } + return nil } // NewTDigestPercentilesAggregate returns a TDigestPercentilesAggregate. diff --git a/typedapi/types/template.go b/typedapi/types/template.go old mode 100755 new mode 100644 index c46862a4b9..2ea2371f31 --- a/typedapi/types/template.go +++ b/typedapi/types/template.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Template type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L33-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L33-L37 type Template struct { Aliases map[string]Alias `json:"aliases"` Mappings TypeMapping `json:"mappings"` Settings IndexSettings `json:"settings"` } +func (s *Template) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if s.Aliases == nil { + s.Aliases = make(map[string]Alias, 0) + } + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + } + } + return nil +} + // NewTemplate returns a Template. func NewTemplate() *Template { r := &Template{ diff --git a/typedapi/types/templatemapping.go b/typedapi/types/templatemapping.go old mode 100755 new mode 100644 index 1bae0bdd16..f9d3d57416 --- a/typedapi/types/templatemapping.go +++ b/typedapi/types/templatemapping.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // TemplateMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/TemplateMapping.ts#L27-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/TemplateMapping.ts#L27-L34 type TemplateMapping struct { Aliases map[string]Alias `json:"aliases"` IndexPatterns []string `json:"index_patterns"` @@ -36,6 +42,73 @@ type TemplateMapping struct { Version *int64 `json:"version,omitempty"` } +func (s *TemplateMapping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "aliases": + if s.Aliases == nil { + s.Aliases = make(map[string]Alias, 0) + } + if err := dec.Decode(&s.Aliases); err != nil { + return err + } + + case "index_patterns": + if err := dec.Decode(&s.IndexPatterns); err != nil { + return err + } + + case "mappings": + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + case "order": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Order = value + case float64: + f := int(v) + s.Order = f + } + + case "settings": + if s.Settings == nil { + s.Settings = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTemplateMapping returns a TemplateMapping. func NewTemplateMapping() *TemplateMapping { r := &TemplateMapping{ diff --git a/typedapi/types/templatesrecord.go b/typedapi/types/templatesrecord.go old mode 100755 new mode 100644 index 5e5cc2a78f..c452772529 --- a/typedapi/types/templatesrecord.go +++ b/typedapi/types/templatesrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TemplatesRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/templates/types.ts#L22-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/templates/types.ts#L22-L48 type TemplatesRecord struct { // ComposedOf component templates comprising index template ComposedOf *string `json:"composed_of,omitempty"` @@ -36,6 +44,60 @@ type TemplatesRecord struct { Version string `json:"version,omitempty"` } +func (s *TemplatesRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "composed_of", "c": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ComposedOf = &o + + case "index_patterns", "t": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexPatterns = &o + + case "name", "n": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "order", "o", "p": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Order = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTemplatesRecord returns a TemplatesRecord. func NewTemplatesRecord() *TemplatesRecord { r := &TemplatesRecord{} diff --git a/typedapi/types/term.go b/typedapi/types/term.go old mode 100755 new mode 100644 index d5247d1735..8ce3456711 --- a/typedapi/types/term.go +++ b/typedapi/types/term.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Term type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/types.ts#L34-L40 type Term struct { DocFreq *int `json:"doc_freq,omitempty"` Score *Float64 `json:"score,omitempty"` @@ -31,6 +41,95 @@ type Term struct { Ttf *int `json:"ttf,omitempty"` } +func (s *Term) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "doc_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DocFreq = &value + case float64: + f := int(v) + s.DocFreq = &f + } + + case "score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score = &f + case float64: + f := Float64(v) + s.Score = &f + } + + case "term_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TermFreq = value + case float64: + f := int(v) + s.TermFreq = f + } + + case "tokens": + if err := dec.Decode(&s.Tokens); err != nil { + return err + } + + case "ttf": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Ttf = &value + case float64: + f := int(v) + s.Ttf = &f + } + + } + } + return nil +} + // NewTerm returns a Term. func NewTerm() *Term { r := &Term{} diff --git a/typedapi/types/termquery.go b/typedapi/types/termquery.go old mode 100755 new mode 100644 index 9bd47a0713..ab52079e19 --- a/typedapi/types/termquery.go +++ b/typedapi/types/termquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermQuery type. 
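// NOTE (illustrative sketch, not part of the generated sources): the decoder
// below accepts both the expanded term query body and the scalar shorthand; a
// payload that is not a JSON object is decoded straight into Value. Example
// values are invented:
//
//	var a, b types.TermQuery
//	_ = json.Unmarshal([]byte(`{"value":"kimchy","boost":1.2}`), &a)
//	_ = json.Unmarshal([]byte(`"kimchy"`), &b) // short form: not a JSON object
//	fmt.Println(a.Value, b.Value)              // kimchy kimchy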
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L116-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L116-L121 type TermQuery struct { Boost *float32 `json:"boost,omitempty"` CaseInsensitive *bool `json:"case_insensitive,omitempty"` @@ -30,6 +40,74 @@ type TermQuery struct { Value FieldValue `json:"value"` } +func (s *TermQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Value) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "case_insensitive": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CaseInsensitive = &value + case bool: + s.CaseInsensitive = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + } + } + return nil +} + // NewTermQuery returns a TermQuery. func NewTermQuery() *TermQuery { r := &TermQuery{} diff --git a/typedapi/types/termsaggregatebasedoubletermsbucket.go b/typedapi/types/termsaggregatebasedoubletermsbucket.go old mode 100755 new mode 100644 index 6ee70ca2be..eb0e144568 --- a/typedapi/types/termsaggregatebasedoubletermsbucket.go +++ b/typedapi/types/termsaggregatebasedoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TermsAggregateBaseDoubleTermsBucket type. 
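// NOTE (illustrative sketch, not part of the generated sources): the decoder
// below takes "buckets" either as a keyed object or as an array, and reads the
// count fields whether they come back as numbers or as strings:
//
//	var agg types.TermsAggregateBaseDoubleTermsBucket
//	raw := []byte(`{"doc_count_error_upper_bound":"0","sum_other_doc_count":3,"buckets":[]}`)
//	if err := json.Unmarshal(raw, &agg); err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(*agg.DocCountErrorUpperBound, *agg.SumOtherDocCount) // 0 3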
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L376-L381 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseDoubleTermsBucket struct { - Buckets BucketsDoubleTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsDoubleTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]DoubleTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []DoubleTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/termsaggregatebaselongtermsbucket.go b/typedapi/types/termsaggregatebaselongtermsbucket.go old mode 100755 new mode 100644 index b0d333f615..014cd05f8d --- a/typedapi/types/termsaggregatebaselongtermsbucket.go +++ b/typedapi/types/termsaggregatebaselongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TermsAggregateBaseLongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L376-L381 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseLongTermsBucket struct { - Buckets BucketsLongTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsLongTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]LongTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []LongTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/termsaggregatebasemultitermsbucket.go b/typedapi/types/termsaggregatebasemultitermsbucket.go old mode 100755 new mode 100644 index deb6adfeae..d0f51522ec --- a/typedapi/types/termsaggregatebasemultitermsbucket.go +++ b/typedapi/types/termsaggregatebasemultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TermsAggregateBaseMultiTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L376-L381 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseMultiTermsBucket struct { - Buckets BucketsMultiTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsMultiTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]MultiTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []MultiTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/termsaggregatebasestringtermsbucket.go b/typedapi/types/termsaggregatebasestringtermsbucket.go old mode 100755 new mode 100644 index 32f13e2d72..758563162f --- a/typedapi/types/termsaggregatebasestringtermsbucket.go +++ b/typedapi/types/termsaggregatebasestringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TermsAggregateBaseStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L376-L381 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseStringTermsBucket struct { - Buckets BucketsStringTermsBucket `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsStringTermsBucket `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]StringTermsBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []StringTermsBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/termsaggregatebasevoid.go b/typedapi/types/termsaggregatebasevoid.go old mode 100755 new mode 100644 index 9c37d26633..ac4b6bcbc5 --- a/typedapi/types/termsaggregatebasevoid.go +++ b/typedapi/types/termsaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TermsAggregateBaseVoid type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L376-L381 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L377-L382 type TermsAggregateBaseVoid struct { - Buckets BucketsVoid `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsVoid `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': - o := make(map[string]struct{}, 0) - localDec.Decode(&o) + o := make(map[string]interface{}, 0) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': - o := []struct{}{} - localDec.Decode(&o) + o := []interface{}{} + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/termsaggregation.go b/typedapi/types/termsaggregation.go old mode 100755 new mode 100644 index f0c6beccaa..222936d25c --- a/typedapi/types/termsaggregation.go +++ b/typedapi/types/termsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,12 +30,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TermsAggregation type. 
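// NOTE (illustrative sketch, not part of the generated sources): the decoder
// below normalises request-side sugar: a single-string "exclude" becomes a
// one-element slice, numeric options may arrive as strings, and "order" may be
// an object or an array. Field names and values are invented for illustration:
//
//	var ta types.TermsAggregation
//	raw := []byte(`{"field":"tags","exclude":"debug.*","min_doc_count":"2","order":{"_count":"desc"}}`)
//	if err := json.Unmarshal(raw, &ta); err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(ta.Exclude, *ta.MinDocCount) // [debug.*] 2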
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L380-L397 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L380-L397 type TermsAggregation struct { CollectMode *termsaggregationcollectmode.TermsAggregationCollectMode `json:"collect_mode,omitempty"` Exclude []string `json:"exclude,omitempty"` @@ -43,7 +45,7 @@ type TermsAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Include TermsInclude `json:"include,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` MinDocCount *int `json:"min_doc_count,omitempty"` Missing Missing `json:"missing,omitempty"` MissingBucket *bool `json:"missing_bucket,omitempty"` @@ -58,6 +60,7 @@ type TermsAggregation struct { } func (s *TermsAggregation) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,8 +80,19 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "exclude": - if err := dec.Decode(&s.Exclude); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Exclude = append(s.Exclude, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Exclude); err != nil { + return err + } } case "execution_hint": @@ -92,9 +106,12 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "format": - if err := dec.Decode(&s.Format); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Format = &o case "include": if err := dec.Decode(&s.Include); err != nil { @@ -107,8 +124,19 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "min_doc_count": - if err := dec.Decode(&s.MinDocCount); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int(v) + s.MinDocCount = &f } case "missing": @@ -117,8 +145,17 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "missing_bucket": - if err := dec.Decode(&s.MissingBucket); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MissingBucket = &value + case bool: + s.MissingBucket = &v } case "missing_order": @@ -127,9 +164,12 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "name": - if err := dec.Decode(&s.Name); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Name = &o case "order": @@ -138,15 +178,17 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o - case '[': o := make([]map[string]sortorder.SortOrder, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Order = o 
} @@ -156,24 +198,58 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "shard_size": - if err := dec.Decode(&s.ShardSize); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f } case "show_term_doc_count_error": - if err := dec.Decode(&s.ShowTermDocCountError); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.ShowTermDocCountError = &value + case bool: + s.ShowTermDocCountError = &v } case "size": - if err := dec.Decode(&s.Size); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f } case "value_type": - if err := dec.Decode(&s.ValueType); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.ValueType = &o } } diff --git a/typedapi/types/termsexclude.go b/typedapi/types/termsexclude.go old mode 100755 new mode 100644 index 592600dc4f..cc42fbabc7 --- a/typedapi/types/termsexclude.go +++ b/typedapi/types/termsexclude.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TermsExclude type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L422-L423 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L422-L423 type TermsExclude []string diff --git a/typedapi/types/termsgrouping.go b/typedapi/types/termsgrouping.go old mode 100755 new mode 100644 index c7ba512740..08478d04c4 --- a/typedapi/types/termsgrouping.go +++ b/typedapi/types/termsgrouping.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TermsGrouping type. 
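// NOTE (illustrative sketch, not part of the generated sources): "fields" may
// be a single string or an array; the decoder below promotes the single-string
// form to a one-element slice. The field name is invented:
//
//	var g types.TermsGrouping
//	_ = json.Unmarshal([]byte(`{"fields":"customer_id"}`), &g)
//	fmt.Println(g.Fields) // [customer_id]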
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/rollup/_types/Groupings.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/rollup/_types/Groupings.ts#L40-L42 type TermsGrouping struct { Fields []string `json:"fields"` } +func (s *TermsGrouping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Fields = append(s.Fields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Fields); err != nil { + return err + } + } + + } + } + return nil +} + // NewTermsGrouping returns a TermsGrouping. func NewTermsGrouping() *TermsGrouping { r := &TermsGrouping{} diff --git a/typedapi/types/termsinclude.go b/typedapi/types/termsinclude.go old mode 100755 new mode 100644 index 9e57dcd3c0..0ff949fbe1 --- a/typedapi/types/termsinclude.go +++ b/typedapi/types/termsinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,5 +26,5 @@ package types // []string // TermsPartition // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L419-L420 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L419-L420 type TermsInclude interface{} diff --git a/typedapi/types/termslookup.go b/typedapi/types/termslookup.go old mode 100755 new mode 100644 index 220c15321a..3ed141c8df --- a/typedapi/types/termslookup.go +++ b/typedapi/types/termslookup.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TermsLookup type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L132-L137 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L132-L137 type TermsLookup struct { Id string `json:"id"` Index string `json:"index"` @@ -30,6 +38,46 @@ type TermsLookup struct { Routing *string `json:"routing,omitempty"` } +func (s *TermsLookup) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "path": + if err := dec.Decode(&s.Path); err != nil { + return err + } + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + } + } + return nil +} + // NewTermsLookup returns a TermsLookup. func NewTermsLookup() *TermsLookup { r := &TermsLookup{} diff --git a/typedapi/types/termspartition.go b/typedapi/types/termspartition.go old mode 100755 new mode 100644 index c6414317cc..2dc9e28e61 --- a/typedapi/types/termspartition.go +++ b/typedapi/types/termspartition.go @@ -16,18 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermsPartition type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L425-L428 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L425-L428 type TermsPartition struct { NumPartitions int64 `json:"num_partitions"` Partition int64 `json:"partition"` } +func (s *TermsPartition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "num_partitions": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.NumPartitions = value + case float64: + f := int64(v) + s.NumPartitions = f + } + + case "partition": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Partition = value + case float64: + f := int64(v) + s.Partition = f + } + + } + } + return nil +} + // NewTermsPartition returns a TermsPartition. func NewTermsPartition() *TermsPartition { r := &TermsPartition{} diff --git a/typedapi/types/termsquery.go b/typedapi/types/termsquery.go old mode 100755 new mode 100644 index b08657bbab..1d11b7b27d --- a/typedapi/types/termsquery.go +++ b/typedapi/types/termsquery.go @@ -16,22 +16,83 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" "fmt" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TermsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L123-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L123-L125 type TermsQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` - TermsQuery map[string]TermsQueryField `json:"-"` + TermsQuery map[string]TermsQueryField `json:"TermsQuery,omitempty"` +} + +func (s *TermsQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "TermsQuery": + if s.TermsQuery == nil { + s.TermsQuery = make(map[string]TermsQueryField, 0) + } + if err := dec.Decode(&s.TermsQuery); err != nil { + return err + } + + default: + + } + } + return nil } // MarhsalJSON overrides marshalling for types with additional properties @@ -53,6 +114,7 @@ func (s TermsQuery) MarshalJSON() ([]byte, error) { for key, value := range s.TermsQuery { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "TermsQuery") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/termsqueryfield.go b/typedapi/types/termsqueryfield.go old mode 100755 new mode 100644 index f8bb099350..bb76c90e3d --- a/typedapi/types/termsqueryfield.go +++ b/typedapi/types/termsqueryfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // []FieldValue // TermsLookup // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L127-L130 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L127-L130 type TermsQueryField interface{} diff --git a/typedapi/types/termssetquery.go b/typedapi/types/termssetquery.go old mode 100755 new mode 100644 index 838c12a20a..ef87c59754 --- a/typedapi/types/termssetquery.go +++ b/typedapi/types/termssetquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermsSetQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L139-L143 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L139-L143 type TermsSetQuery struct { Boost *float32 `json:"boost,omitempty"` MinimumShouldMatchField *string `json:"minimum_should_match_field,omitempty"` @@ -31,6 +41,65 @@ type TermsSetQuery struct { Terms []string `json:"terms"` } +func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "minimum_should_match_field": + if err := dec.Decode(&s.MinimumShouldMatchField); err != nil { + return err + } + + case "minimum_should_match_script": + if err := dec.Decode(&s.MinimumShouldMatchScript); err != nil { + return err + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "terms": + if err := dec.Decode(&s.Terms); err != nil { + return err + } + + } + } + return nil +} + // NewTermsSetQuery returns a TermsSetQuery. func NewTermsSetQuery() *TermsSetQuery { r := &TermsSetQuery{} diff --git a/typedapi/types/termsuggest.go b/typedapi/types/termsuggest.go old mode 100755 new mode 100644 index 53c8148941..7e028d6dea --- a/typedapi/types/termsuggest.go +++ b/typedapi/types/termsuggest.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermSuggest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L64-L69 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L64-L69 type TermSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -30,6 +40,82 @@ type TermSuggest struct { Text string `json:"text"` } +func (s *TermSuggest) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Length = value + case float64: + f := int(v) + s.Length = f + } + + case "offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Offset = value + case float64: + f := int(v) + s.Offset = f + } + + case "options": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewTermSuggestOption() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Options = append(s.Options, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Options); err != nil { + return err + } + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = o + + } + } + return nil +} + // NewTermSuggest returns a TermSuggest. func NewTermSuggest() *TermSuggest { r := &TermSuggest{} diff --git a/typedapi/types/termsuggester.go b/typedapi/types/termsuggester.go old mode 100755 new mode 100644 index 155891aac1..99dc19316d --- a/typedapi/types/termsuggester.go +++ b/typedapi/types/termsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -24,11 +24,19 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/stringdistance" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/suggestmode" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/suggestsort" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TermSuggester type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L252-L265 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L256-L269 type TermSuggester struct { Analyzer *string `json:"analyzer,omitempty"` Field string `json:"field"` @@ -47,6 +55,204 @@ type TermSuggester struct { Text *string `json:"text,omitempty"` } +func (s *TermSuggester) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "analyzer": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Analyzer = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "lowercase_terms": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.LowercaseTerms = &value + case bool: + s.LowercaseTerms = &v + } + + case "max_edits": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxEdits = &value + case float64: + f := int(v) + s.MaxEdits = &f + } + + case "max_inspections": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxInspections = &value + case float64: + f := int(v) + s.MaxInspections = &f + } + + case "max_term_freq": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.MaxTermFreq = &f + case float64: + f := float32(v) + s.MaxTermFreq = &f + } + + case "min_doc_freq": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.MinDocFreq = &f + case float64: + f := float32(v) + s.MinDocFreq = &f + } + + case "min_word_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinWordLength = &value + case float64: + f := int(v) + s.MinWordLength = &f + } + + case "prefix_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PrefixLength = &value + case float64: + f := int(v) + s.PrefixLength = &f + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + case "string_distance": + if err := dec.Decode(&s.StringDistance); err != nil { + return err + } + + case "suggest_mode": + if err := 
dec.Decode(&s.SuggestMode); err != nil { + return err + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = &o + + } + } + return nil +} + // NewTermSuggester returns a TermSuggester. func NewTermSuggester() *TermSuggester { r := &TermSuggester{} diff --git a/typedapi/types/termsuggestoption.go b/typedapi/types/termsuggestoption.go old mode 100755 new mode 100644 index 1a9111e3fb..deaedf5826 --- a/typedapi/types/termsuggestoption.go +++ b/typedapi/types/termsuggestoption.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermSuggestOption type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/suggester.ts#L93-L99 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/suggester.ts#L93-L99 type TermSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Freq int64 `json:"freq"` @@ -31,6 +41,87 @@ type TermSuggestOption struct { Text string `json:"text"` } +func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collate_match": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CollateMatch = &value + case bool: + s.CollateMatch = &v + } + + case "freq": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Freq = value + case float64: + f := int64(v) + s.Freq = f + } + + case "highlighted": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Highlighted = &o + + case "score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Score = f + case float64: + f := Float64(v) + s.Score = f + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Text = o + + } + } + return nil +} + // NewTermSuggestOption returns a TermSuggestOption. func NewTermSuggestOption() *TermSuggestOption { r := &TermSuggestOption{} diff --git a/typedapi/types/termvector.go b/typedapi/types/termvector.go old mode 100755 new mode 100644 index 46fd4c708d..f013520859 --- a/typedapi/types/termvector.go +++ b/typedapi/types/termvector.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TermVector type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/types.ts#L23-L26 type TermVector struct { FieldStatistics FieldStatistics `json:"field_statistics"` Terms map[string]Term `json:"terms"` diff --git a/typedapi/types/termvectorsfilter.go b/typedapi/types/termvectorsfilter.go old mode 100755 new mode 100644 index 758144e1c9..18fa6e0b7e --- a/typedapi/types/termvectorsfilter.go +++ b/typedapi/types/termvectorsfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermVectorsFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/types.ts#L49-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/types.ts#L49-L57 type TermVectorsFilter struct { MaxDocFreq *int `json:"max_doc_freq,omitempty"` MaxNumTerms *int `json:"max_num_terms,omitempty"` @@ -33,6 +43,138 @@ type TermVectorsFilter struct { MinWordLength *int `json:"min_word_length,omitempty"` } +func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_doc_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxDocFreq = &value + case float64: + f := int(v) + s.MaxDocFreq = &f + } + + case "max_num_terms": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxNumTerms = &value + case float64: + f := int(v) + s.MaxNumTerms = &f + } + + case "max_term_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTermFreq = &value + case float64: + f := int(v) + s.MaxTermFreq = &f + } + + case "max_word_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxWordLength = &value + case float64: + f := int(v) + s.MaxWordLength = &f + } + + case "min_doc_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinDocFreq = &value + case float64: + f := int(v) + s.MinDocFreq = &f + } + + case "min_term_freq": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinTermFreq = &value + case float64: + f := int(v) + s.MinTermFreq = &f + } + + case "min_word_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case 
string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinWordLength = &value + case float64: + f := int(v) + s.MinWordLength = &f + } + + } + } + return nil +} + // NewTermVectorsFilter returns a TermVectorsFilter. func NewTermVectorsFilter() *TermVectorsFilter { r := &TermVectorsFilter{} diff --git a/typedapi/types/termvectorsresult.go b/typedapi/types/termvectorsresult.go old mode 100755 new mode 100644 index 7a7ffd4a94..14e65c6aa2 --- a/typedapi/types/termvectorsresult.go +++ b/typedapi/types/termvectorsresult.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermVectorsResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/mtermvectors/types.ts#L51-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/mtermvectors/types.ts#L51-L59 type TermVectorsResult struct { Error *ErrorCause `json:"error,omitempty"` Found *bool `json:"found,omitempty"` @@ -33,6 +43,83 @@ type TermVectorsResult struct { Version_ *int64 `json:"_version,omitempty"` } +func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "error": + if err := dec.Decode(&s.Error); err != nil { + return err + } + + case "found": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Found = &value + case bool: + s.Found = &v + } + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + case "term_vectors": + if s.TermVectors == nil { + s.TermVectors = make(map[string]TermVector, 0) + } + if err := dec.Decode(&s.TermVectors); err != nil { + return err + } + + case "took": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Took = &value + case float64: + f := int64(v) + s.Took = &f + } + + case "_version": + if err := dec.Decode(&s.Version_); err != nil { + return err + } + + } + } + return nil +} + // NewTermVectorsResult returns a TermVectorsResult. func NewTermVectorsResult() *TermVectorsResult { r := &TermVectorsResult{ diff --git a/typedapi/types/termvectorstoken.go b/typedapi/types/termvectorstoken.go old mode 100755 new mode 100644 index 3b5e94ce9e..a510254df3 --- a/typedapi/types/termvectorstoken.go +++ b/typedapi/types/termvectorstoken.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TermVectorsToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/termvectors/types.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/termvectors/types.ts#L42-L47 type TermVectorsToken struct { EndOffset *int `json:"end_offset,omitempty"` Payload *string `json:"payload,omitempty"` @@ -30,6 +40,82 @@ type TermVectorsToken struct { StartOffset *int `json:"start_offset,omitempty"` } +func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "end_offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.EndOffset = &value + case float64: + f := int(v) + s.EndOffset = &f + } + + case "payload": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Payload = &o + + case "position": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Position = value + case float64: + f := int(v) + s.Position = f + } + + case "start_offset": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.StartOffset = &value + case float64: + f := int(v) + s.StartOffset = &f + } + + } + } + return nil +} + // NewTermVectorsToken returns a TermVectorsToken. func NewTermVectorsToken() *TermVectorsToken { r := &TermVectorsToken{} diff --git a/typedapi/types/testpopulation.go b/typedapi/types/testpopulation.go old mode 100755 new mode 100644 index 6390e34cf9..153baf93a6 --- a/typedapi/types/testpopulation.go +++ b/typedapi/types/testpopulation.go @@ -16,19 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TestPopulation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L159-L163 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L159-L163 type TestPopulation struct { Field string `json:"field"` Filter *Query `json:"filter,omitempty"` Script Script `json:"script,omitempty"` } +func (s *TestPopulation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "filter": + if err := dec.Decode(&s.Filter); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewTestPopulation returns a TestPopulation. 
func NewTestPopulation() *TestPopulation { r := &TestPopulation{} diff --git a/typedapi/types/textclassificationinferenceoptions.go b/typedapi/types/textclassificationinferenceoptions.go old mode 100755 new mode 100644 index bfd7e01bff..bd16287189 --- a/typedapi/types/textclassificationinferenceoptions.go +++ b/typedapi/types/textclassificationinferenceoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TextClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L174-L184 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L174-L184 type TextClassificationInferenceOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. Must have the // same deminsions as the default configured labels @@ -36,6 +46,60 @@ type TextClassificationInferenceOptions struct { Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` } +func (s *TextClassificationInferenceOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classification_labels": + if err := dec.Decode(&s.ClassificationLabels); err != nil { + return err + } + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewTextClassificationInferenceOptions returns a TextClassificationInferenceOptions. func NewTextClassificationInferenceOptions() *TextClassificationInferenceOptions { r := &TextClassificationInferenceOptions{} diff --git a/typedapi/types/textclassificationinferenceupdateoptions.go b/typedapi/types/textclassificationinferenceupdateoptions.go old mode 100755 new mode 100644 index 275d504a74..9f5f7b5203 --- a/typedapi/types/textclassificationinferenceupdateoptions.go +++ b/typedapi/types/textclassificationinferenceupdateoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TextClassificationInferenceUpdateOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L328-L337 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L328-L337 type TextClassificationInferenceUpdateOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. Must have the // same deminsions as the default configured labels @@ -36,6 +46,60 @@ type TextClassificationInferenceUpdateOptions struct { Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` } +func (s *TextClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classification_labels": + if err := dec.Decode(&s.ClassificationLabels); err != nil { + return err + } + + case "num_top_classes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumTopClasses = &value + case float64: + f := int(v) + s.NumTopClasses = &f + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewTextClassificationInferenceUpdateOptions returns a TextClassificationInferenceUpdateOptions. func NewTextClassificationInferenceUpdateOptions() *TextClassificationInferenceUpdateOptions { r := &TextClassificationInferenceUpdateOptions{} diff --git a/typedapi/types/textembedding.go b/typedapi/types/textembedding.go old mode 100755 new mode 100644 index 620360a41f..c14898bdcc --- a/typedapi/types/textembedding.go +++ b/typedapi/types/textembedding.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TextEmbedding type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Knn.ts#L48-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Knn.ts#L48-L51 type TextEmbedding struct { ModelId string `json:"model_id"` ModelText string `json:"model_text"` diff --git a/typedapi/types/textembeddinginferenceoptions.go b/typedapi/types/textembeddinginferenceoptions.go old mode 100755 new mode 100644 index aec81f7eca..5c7f5ef19b --- a/typedapi/types/textembeddinginferenceoptions.go +++ b/typedapi/types/textembeddinginferenceoptions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TextEmbeddingInferenceOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L222-L228 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L222-L228 type TextEmbeddingInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/textembeddinginferenceupdateoptions.go b/typedapi/types/textembeddinginferenceupdateoptions.go old mode 100755 new mode 100644 index 5d13dc6feb..24812ebbbd --- a/typedapi/types/textembeddinginferenceupdateoptions.go +++ b/typedapi/types/textembeddinginferenceupdateoptions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TextEmbeddingInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L357-L361 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L357-L361 type TextEmbeddingInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/textindexprefixes.go b/typedapi/types/textindexprefixes.go old mode 100755 new mode 100644 index 28374a4d1b..4ad7ad618d --- a/typedapi/types/textindexprefixes.go +++ b/typedapi/types/textindexprefixes.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TextIndexPrefixes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L242-L245 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L242-L245 type TextIndexPrefixes struct { MaxChars int `json:"max_chars"` MinChars int `json:"min_chars"` } +func (s *TextIndexPrefixes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_chars": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxChars = value + case float64: + f := int(v) + s.MaxChars = f + } + + case "min_chars": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MinChars = value + case float64: + f := int(v) + s.MinChars = f + } + + } + } + return nil +} + // NewTextIndexPrefixes returns a TextIndexPrefixes. 
func NewTextIndexPrefixes() *TextIndexPrefixes { r := &TextIndexPrefixes{} diff --git a/typedapi/types/textproperty.go b/typedapi/types/textproperty.go old mode 100755 new mode 100644 index c2e1eed0fa..ddf12fbd4a --- a/typedapi/types/textproperty.go +++ b/typedapi/types/textproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TextProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L247-L263 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L247-L263 type TextProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -63,6 +65,7 @@ type TextProperty struct { } func (s *TextProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,18 +80,43 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { switch t { case "analyzer": - if err := dec.Decode(&s.Analyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Analyzer = &o case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "dynamic": @@ -97,13 +125,31 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - if err := dec.Decode(&s.EagerGlobalOrdinals); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EagerGlobalOrdinals = &value + case bool: + s.EagerGlobalOrdinals = &v } case "fielddata": - if err := dec.Decode(&s.Fielddata); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Fielddata = &value + case bool: + s.Fielddata = &v } case "fielddata_frequency_filter": @@ -112,6 +158,9 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -399,20 +448,40 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err 
!= nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "index_options": @@ -421,8 +490,17 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "index_phrases": - if err := dec.Decode(&s.IndexPhrases); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IndexPhrases = &value + case bool: + s.IndexPhrases = &v } case "index_prefixes": @@ -431,21 +509,47 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "norms": - if err := dec.Decode(&s.Norms); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Norms = &value + case bool: + s.Norms = &v } case "position_increment_gap": - if err := dec.Decode(&s.PositionIncrementGap); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PositionIncrementGap = &value + case float64: + f := int(v) + s.PositionIncrementGap = &f } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -733,30 +837,48 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "search_analyzer": - if err := dec.Decode(&s.SearchAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchAnalyzer = &o case "search_quote_analyzer": - if err := dec.Decode(&s.SearchQuoteAnalyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.SearchQuoteAnalyzer = &o case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "term_vector": diff --git a/typedapi/types/texttoanalyze.go b/typedapi/types/texttoanalyze.go old mode 100755 new mode 100644 index 206199c809..53c477d9d0 --- a/typedapi/types/texttoanalyze.go +++ b/typedapi/types/texttoanalyze.go @@ -16,11 +16,11 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TextToAnalyze type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L66-L66 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L66-L66 type TextToAnalyze []string diff --git a/typedapi/types/threadcount.go b/typedapi/types/threadcount.go old mode 100755 new mode 100644 index 01cc8b4e7b..d12d4d26ff --- a/typedapi/types/threadcount.go +++ b/typedapi/types/threadcount.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ThreadCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L404-L411 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L404-L411 type ThreadCount struct { Active *int64 `json:"active,omitempty"` Completed *int64 `json:"completed,omitempty"` @@ -32,6 +42,116 @@ type ThreadCount struct { Threads *int64 `json:"threads,omitempty"` } +func (s *ThreadCount) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Active = &value + case float64: + f := int64(v) + s.Active = &f + } + + case "completed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Completed = &value + case float64: + f := int64(v) + s.Completed = &f + } + + case "largest": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Largest = &value + case float64: + f := int64(v) + s.Largest = &f + } + + case "queue": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Queue = &value + case float64: + f := int64(v) + s.Queue = &f + } + + case "rejected": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Rejected = &value + case float64: + f := int64(v) + s.Rejected = &f + } + + case "threads": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Threads = &value + case float64: + f := int64(v) + 
s.Threads = &f + } + + } + } + return nil +} + // NewThreadCount returns a ThreadCount. func NewThreadCount() *ThreadCount { r := &ThreadCount{} diff --git a/typedapi/types/threadpoolrecord.go b/typedapi/types/threadpoolrecord.go old mode 100755 new mode 100644 index aad0eaa4b0..b0ad93dc56 --- a/typedapi/types/threadpoolrecord.go +++ b/typedapi/types/threadpoolrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ThreadPoolRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/thread_pool/types.ts#L22-L123 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/thread_pool/types.ts#L22-L123 type ThreadPoolRecord struct { // Active number of active threads Active *string `json:"active,omitempty"` @@ -66,6 +74,183 @@ type ThreadPoolRecord struct { Type *string `json:"type,omitempty"` } +func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active", "a": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Active = &o + + case "completed", "c": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Completed = &o + + case "core", "cr": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Core = o + + case "ephemeral_node_id", "eid": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.EphemeralNodeId = &o + + case "host", "h": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Host = &o + + case "ip", "i": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Ip = &o + + case "keep_alive", "ka": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.KeepAlive = o + + case "largest", "l": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Largest = &o + + case "max", "mx": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Max = o + + case "name", "n": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "node_id", "id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "node_name", "nn": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.NodeName = &o + + case "pid", "p": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pid = &o + + case "pool_size", "psz": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PoolSize = &o + + case "port", 
"po": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Port = &o + + case "queue", "q": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Queue = &o + + case "queue_size", "qs": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueueSize = &o + + case "rejected", "r": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Rejected = &o + + case "size", "sz": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Size = o + + case "type", "t": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + } + } + return nil +} + // NewThreadPoolRecord returns a ThreadPoolRecord. func NewThreadPoolRecord() *ThreadPoolRecord { r := &ThreadPoolRecord{} diff --git a/typedapi/types/throttlestate.go b/typedapi/types/throttlestate.go old mode 100755 new mode 100644 index 54aec5024f..8110371398 --- a/typedapi/types/throttlestate.go +++ b/typedapi/types/throttlestate.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ThrottleState type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L123-L126 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L123-L126 type ThrottleState struct { Reason string `json:"reason"` Timestamp DateTime `json:"timestamp"` } +func (s *ThrottleState) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = o + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewThrottleState returns a ThrottleState. func NewThrottleState() *ThrottleState { r := &ThrottleState{} diff --git a/typedapi/types/timeofmonth.go b/typedapi/types/timeofmonth.go old mode 100755 new mode 100644 index 218033bca3..6bce669374 --- a/typedapi/types/timeofmonth.go +++ b/typedapi/types/timeofmonth.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TimeOfMonth type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L115-L118 type TimeOfMonth struct { At []string `json:"at"` On []int `json:"on"` } +func (s *TimeOfMonth) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "at": + if err := dec.Decode(&s.At); err != nil { + return err + } + + case "on": + if err := dec.Decode(&s.On); err != nil { + return err + } + + } + } + return nil +} + // NewTimeOfMonth returns a TimeOfMonth. func NewTimeOfMonth() *TimeOfMonth { r := &TimeOfMonth{} diff --git a/typedapi/types/timeofweek.go b/typedapi/types/timeofweek.go old mode 100755 new mode 100644 index 61ebf7fa9c..a80d54feb4 --- a/typedapi/types/timeofweek.go +++ b/typedapi/types/timeofweek.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // TimeOfWeek type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L120-L123 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L120-L123 type TimeOfWeek struct { At []string `json:"at"` On []day.Day `json:"on"` diff --git a/typedapi/types/timeofyear.go b/typedapi/types/timeofyear.go old mode 100755 new mode 100644 index 7b598b4d03..2dd95c362d --- a/typedapi/types/timeofyear.go +++ b/typedapi/types/timeofyear.go @@ -16,23 +16,64 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/month" + + "bytes" + "errors" + "io" + + "encoding/json" ) // TimeOfYear type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Schedule.ts#L125-L129 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Schedule.ts#L125-L129 type TimeOfYear struct { At []string `json:"at"` Int []month.Month `json:"int"` On []int `json:"on"` } +func (s *TimeOfYear) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "at": + if err := dec.Decode(&s.At); err != nil { + return err + } + + case "int": + if err := dec.Decode(&s.Int); err != nil { + return err + } + + case "on": + if err := dec.Decode(&s.On); err != nil { + return err + } + + } + } + return nil +} + // NewTimeOfYear returns a TimeOfYear. 
func NewTimeOfYear() *TimeOfYear { r := &TimeOfYear{} diff --git a/typedapi/types/timesync.go b/typedapi/types/timesync.go old mode 100755 new mode 100644 index 69dbfc78ae..f7440bcd87 --- a/typedapi/types/timesync.go +++ b/typedapi/types/timesync.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TimeSync type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L175-L187 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L175-L187 type TimeSync struct { // Delay The time delay between the current time and the latest input data time. Delay Duration `json:"delay,omitempty"` @@ -34,6 +42,36 @@ type TimeSync struct { Field string `json:"field"` } +func (s *TimeSync) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "delay": + if err := dec.Decode(&s.Delay); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + } + } + return nil +} + // NewTimeSync returns a TimeSync. func NewTimeSync() *TimeSync { r := &TimeSync{} diff --git a/typedapi/types/timingstats.go b/typedapi/types/timingstats.go old mode 100755 new mode 100644 index 633dd17da4..03419230b7 --- a/typedapi/types/timingstats.go +++ b/typedapi/types/timingstats.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L421-L426 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L421-L426 type TimingStats struct { // ElapsedTime Runtime of the analysis in milliseconds. ElapsedTime int64 `json:"elapsed_time"` @@ -30,6 +38,36 @@ type TimingStats struct { IterationTime *int64 `json:"iteration_time,omitempty"` } +func (s *TimingStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "elapsed_time": + if err := dec.Decode(&s.ElapsedTime); err != nil { + return err + } + + case "iteration_time": + if err := dec.Decode(&s.IterationTime); err != nil { + return err + } + + } + } + return nil +} + // NewTimingStats returns a TimingStats. 
func NewTimingStats() *TimingStats { r := &TimingStats{} diff --git a/typedapi/types/tokencountproperty.go b/typedapi/types/tokencountproperty.go old mode 100755 new mode 100644 index d3137af1e1..0adc345a08 --- a/typedapi/types/tokencountproperty.go +++ b/typedapi/types/tokencountproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TokenCountProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/specialized.ts#L78-L85 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/specialized.ts#L78-L85 type TokenCountProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -53,6 +55,7 @@ type TokenCountProperty struct { } func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -67,23 +70,57 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { switch t { case "analyzer": - if err := dec.Decode(&s.Analyzer); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Analyzer = &o case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -92,11 +129,23 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } case "enable_position_increments": - if err := dec.Decode(&s.EnablePositionIncrements); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.EnablePositionIncrements = &value + case bool: + s.EnablePositionIncrements = &v } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -384,33 +433,70 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case 
"ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.NullValue = &f + case float64: + f := Float64(v) + s.NullValue = &f } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -698,20 +784,32 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/tokendetail.go b/typedapi/types/tokendetail.go old mode 100755 new mode 100644 index 7cbf0cb806..f35f5c157b --- a/typedapi/types/tokendetail.go +++ b/typedapi/types/tokendetail.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TokenDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/analyze/types.ts#L68-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/analyze/types.ts#L68-L71 type TokenDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` diff --git a/typedapi/types/tokenfilter.go b/typedapi/types/tokenfilter.go old mode 100755 new mode 100644 index 593e85ede0..0383f423f8 --- a/typedapi/types/tokenfilter.go +++ b/typedapi/types/tokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // TokenFilterDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L342-L344 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L344-L346 type TokenFilter interface{} diff --git a/typedapi/types/tokenfilterdefinition.go b/typedapi/types/tokenfilterdefinition.go old mode 100755 new mode 100644 index a92456bcfb..b66bf90c73 --- a/typedapi/types/tokenfilterdefinition.go +++ b/typedapi/types/tokenfilterdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -71,5 +71,5 @@ package types // PhoneticTokenFilter // DictionaryDecompounderTokenFilter // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L346-L399 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L348-L401 type TokenFilterDefinition interface{} diff --git a/typedapi/types/tokenizationconfigcontainer.go b/typedapi/types/tokenizationconfigcontainer.go old mode 100755 new mode 100644 index 91b21f6b8a..85bb58cfbb --- a/typedapi/types/tokenizationconfigcontainer.go +++ b/typedapi/types/tokenizationconfigcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TokenizationConfigContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L97-L114 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L97-L114 type TokenizationConfigContainer struct { // Bert Indicates BERT tokenization and its options Bert *NlpBertTokenizationConfig `json:"bert,omitempty"` diff --git a/typedapi/types/tokenizer.go b/typedapi/types/tokenizer.go old mode 100755 new mode 100644 index 0f24237538..8c4f3c5894 --- a/typedapi/types/tokenizer.go +++ b/typedapi/types/tokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // string // TokenizerDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L119-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L119-L121 type Tokenizer interface{} diff --git a/typedapi/types/tokenizerdefinition.go b/typedapi/types/tokenizerdefinition.go old mode 100755 new mode 100644 index 2bc62a1fe5..afdbfef1cf --- a/typedapi/types/tokenizerdefinition.go +++ b/typedapi/types/tokenizerdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -37,5 +37,5 @@ package types // PatternTokenizer // IcuTokenizer // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L123-L141 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L123-L141 type TokenizerDefinition interface{} diff --git a/typedapi/types/topclassentry.go b/typedapi/types/topclassentry.go old mode 100755 new mode 100644 index f24f6ff5f7..6065f43f98 --- a/typedapi/types/topclassentry.go +++ b/typedapi/types/topclassentry.go @@ -16,19 +16,89 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TopClassEntry type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L399-L403 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L399-L403 type TopClassEntry struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` ClassScore Float64 `json:"class_score"` } +func (s *TopClassEntry) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClassName = o + + case "class_probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ClassProbability = f + case float64: + f := Float64(v) + s.ClassProbability = f + } + + case "class_score": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ClassScore = f + case float64: + f := Float64(v) + s.ClassScore = f + } + + } + } + return nil +} + // NewTopClassEntry returns a TopClassEntry. func NewTopClassEntry() *TopClassEntry { r := &TopClassEntry{} diff --git a/typedapi/types/tophitsaggregate.go b/typedapi/types/tophitsaggregate.go old mode 100755 new mode 100644 index 56aa1c1be6..27dc644b26 --- a/typedapi/types/tophitsaggregate.go +++ b/typedapi/types/tophitsaggregate.go @@ -16,20 +16,54 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // TopHitsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L645-L648 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L654-L657 type TopHitsAggregate struct { - Hits HitsMetadata `json:"hits"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Hits HitsMetadata `json:"hits"` + Meta Metadata `json:"meta,omitempty"` +} + +func (s *TopHitsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "hits": + if err := dec.Decode(&s.Hits); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + } + } + return nil } // NewTopHitsAggregate returns a TopHitsAggregate. diff --git a/typedapi/types/tophitsaggregation.go b/typedapi/types/tophitsaggregation.go old mode 100755 new mode 100644 index 37f28e51c9..fe044639ec --- a/typedapi/types/tophitsaggregation.go +++ b/typedapi/types/tophitsaggregation.go @@ -16,13 +16,23 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TopHitsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L171-L184 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L171-L184 type TopHitsAggregation struct { DocvalueFields []string `json:"docvalue_fields,omitempty"` Explain *bool `json:"explain,omitempty"` @@ -41,6 +51,195 @@ type TopHitsAggregation struct { Version *bool `json:"version,omitempty"` } +func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "docvalue_fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.DocvalueFields = append(s.DocvalueFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.DocvalueFields); err != nil { + return err + } + } + + case "explain": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Explain = &value + case bool: + s.Explain = &v + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "from": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.From = &value + case float64: + f := int(v) + s.From = &f + } + + case "highlight": + if err := dec.Decode(&s.Highlight); err != nil { + return err + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "script_fields": + if s.ScriptFields == nil { + s.ScriptFields = make(map[string]ScriptField, 0) + } + if err := dec.Decode(&s.ScriptFields); err != nil { + return err + } + + case "seq_no_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SeqNoPrimaryTerm = &value + case bool: + s.SeqNoPrimaryTerm = &v + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return err + } + } + + case "_source": + if err := dec.Decode(&s.Source_); err != nil { + return err 
+ } + + case "stored_fields": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.StoredFields = append(s.StoredFields, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.StoredFields); err != nil { + return err + } + } + + case "track_scores": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TrackScores = &value + case bool: + s.TrackScores = &v + } + + case "version": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Version = &value + case bool: + s.Version = &v + } + + } + } + return nil +} + // NewTopHitsAggregation returns a TopHitsAggregation. func NewTopHitsAggregation() *TopHitsAggregation { r := &TopHitsAggregation{ diff --git a/typedapi/types/topleftbottomrightgeobounds.go b/typedapi/types/topleftbottomrightgeobounds.go old mode 100755 new mode 100644 index 24733bec6c..b6ee2b7eb1 --- a/typedapi/types/topleftbottomrightgeobounds.go +++ b/typedapi/types/topleftbottomrightgeobounds.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TopLeftBottomRightGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L145-L148 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L145-L148 type TopLeftBottomRightGeoBounds struct { BottomRight GeoLocation `json:"bottom_right"` TopLeft GeoLocation `json:"top_left"` } +func (s *TopLeftBottomRightGeoBounds) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bottom_right": + if err := dec.Decode(&s.BottomRight); err != nil { + return err + } + + case "top_left": + if err := dec.Decode(&s.TopLeft); err != nil { + return err + } + + } + } + return nil +} + // NewTopLeftBottomRightGeoBounds returns a TopLeftBottomRightGeoBounds. func NewTopLeftBottomRightGeoBounds() *TopLeftBottomRightGeoBounds { r := &TopLeftBottomRightGeoBounds{} diff --git a/typedapi/types/topmetrics.go b/typedapi/types/topmetrics.go old mode 100755 new mode 100644 index 0dafa6dc83..855c7dd5a9 --- a/typedapi/types/topmetrics.go +++ b/typedapi/types/topmetrics.go @@ -16,18 +16,58 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "encoding/json" + "errors" + "io" +) + // TopMetrics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L720-L724 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L729-L733 type TopMetrics struct { Metrics map[string]FieldValue `json:"metrics"` Sort []FieldValue `json:"sort"` } +func (s *TopMetrics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "metrics": + if s.Metrics == nil { + s.Metrics = make(map[string]FieldValue, 0) + } + if err := dec.Decode(&s.Metrics); err != nil { + return err + } + + case "sort": + if err := dec.Decode(&s.Sort); err != nil { + return err + } + + } + } + return nil +} + // NewTopMetrics returns a TopMetrics. func NewTopMetrics() *TopMetrics { r := &TopMetrics{ diff --git a/typedapi/types/topmetricsaggregate.go b/typedapi/types/topmetricsaggregate.go old mode 100755 new mode 100644 index a554db9252..f198890f3d --- a/typedapi/types/topmetricsaggregate.go +++ b/typedapi/types/topmetricsaggregate.go @@ -16,20 +16,54 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // TopMetricsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L715-L718 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L724-L727 type TopMetricsAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Top []TopMetrics `json:"top"` + Meta Metadata `json:"meta,omitempty"` + Top []TopMetrics `json:"top"` +} + +func (s *TopMetricsAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "top": + if err := dec.Decode(&s.Top); err != nil { + return err + } + + } + } + return nil } // NewTopMetricsAggregate returns a TopMetricsAggregate. diff --git a/typedapi/types/topmetricsaggregation.go b/typedapi/types/topmetricsaggregation.go old mode 100755 new mode 100644 index f2018d36d4..414c3efde5 --- a/typedapi/types/topmetricsaggregation.go +++ b/typedapi/types/topmetricsaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TopMetricsAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L186-L190 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L186-L190 type TopMetricsAggregation struct { Field *string `json:"field,omitempty"` Metrics []TopMetricsValue `json:"metrics,omitempty"` @@ -32,6 +42,89 @@ type TopMetricsAggregation struct { Sort []SortCombinations `json:"sort,omitempty"` } +func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "metrics": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewTopMetricsValue() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Metrics = append(s.Metrics, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Metrics); err != nil { + return err + } + } + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return err + } + } + + } + } + return nil +} + // NewTopMetricsAggregation returns a TopMetricsAggregation. func NewTopMetricsAggregation() *TopMetricsAggregation { r := &TopMetricsAggregation{} diff --git a/typedapi/types/topmetricsvalue.go b/typedapi/types/topmetricsvalue.go old mode 100755 new mode 100644 index ac63406065..761bf4716e --- a/typedapi/types/topmetricsvalue.go +++ b/typedapi/types/topmetricsvalue.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TopMetricsValue type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L192-L194 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L192-L194 type TopMetricsValue struct { Field string `json:"field"` } +func (s *TopMetricsValue) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + } + } + return nil +} + // NewTopMetricsValue returns a TopMetricsValue. func NewTopMetricsValue() *TopMetricsValue { r := &TopMetricsValue{} diff --git a/typedapi/types/toprightbottomleftgeobounds.go b/typedapi/types/toprightbottomleftgeobounds.go old mode 100755 new mode 100644 index 8a8a48eb20..a1de5d7791 --- a/typedapi/types/toprightbottomleftgeobounds.go +++ b/typedapi/types/toprightbottomleftgeobounds.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TopRightBottomLeftGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L150-L153 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L150-L153 type TopRightBottomLeftGeoBounds struct { BottomLeft GeoLocation `json:"bottom_left"` TopRight GeoLocation `json:"top_right"` } +func (s *TopRightBottomLeftGeoBounds) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bottom_left": + if err := dec.Decode(&s.BottomLeft); err != nil { + return err + } + + case "top_right": + if err := dec.Decode(&s.TopRight); err != nil { + return err + } + + } + } + return nil +} + // NewTopRightBottomLeftGeoBounds returns a TopRightBottomLeftGeoBounds. func NewTopRightBottomLeftGeoBounds() *TopRightBottomLeftGeoBounds { r := &TopRightBottomLeftGeoBounds{} diff --git a/typedapi/types/totalfeatureimportance.go b/typedapi/types/totalfeatureimportance.go old mode 100755 new mode 100644 index 2d1019eff9..fa1524080d --- a/typedapi/types/totalfeatureimportance.go +++ b/typedapi/types/totalfeatureimportance.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TotalFeatureImportance type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L222-L229 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L224-L231 type TotalFeatureImportance struct { // Classes If the trained model is a classification model, feature importance statistics // are gathered per target class value. @@ -34,6 +42,41 @@ type TotalFeatureImportance struct { Importance []TotalFeatureImportanceStatistics `json:"importance"` } +func (s *TotalFeatureImportance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + case "feature_name": + if err := dec.Decode(&s.FeatureName); err != nil { + return err + } + + case "importance": + if err := dec.Decode(&s.Importance); err != nil { + return err + } + + } + } + return nil +} + // NewTotalFeatureImportance returns a TotalFeatureImportance. func NewTotalFeatureImportance() *TotalFeatureImportance { r := &TotalFeatureImportance{} diff --git a/typedapi/types/totalfeatureimportanceclass.go b/typedapi/types/totalfeatureimportanceclass.go old mode 100755 new mode 100644 index 91e7b7a8a1..2ade4c8041 --- a/typedapi/types/totalfeatureimportanceclass.go +++ b/typedapi/types/totalfeatureimportanceclass.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TotalFeatureImportanceClass type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L231-L236 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L233-L238 type TotalFeatureImportanceClass struct { // ClassName The target class value. Could be a string, boolean, or number. ClassName string `json:"class_name"` @@ -31,6 +39,36 @@ type TotalFeatureImportanceClass struct { Importance []TotalFeatureImportanceStatistics `json:"importance"` } +func (s *TotalFeatureImportanceClass) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + if err := dec.Decode(&s.ClassName); err != nil { + return err + } + + case "importance": + if err := dec.Decode(&s.Importance); err != nil { + return err + } + + } + } + return nil +} + // NewTotalFeatureImportanceClass returns a TotalFeatureImportanceClass. 
func NewTotalFeatureImportanceClass() *TotalFeatureImportanceClass { r := &TotalFeatureImportanceClass{} diff --git a/typedapi/types/totalfeatureimportancestatistics.go b/typedapi/types/totalfeatureimportancestatistics.go old mode 100755 new mode 100644 index 2e5d061ee2..215ed5a137 --- a/typedapi/types/totalfeatureimportancestatistics.go +++ b/typedapi/types/totalfeatureimportancestatistics.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TotalFeatureImportanceStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L238-L245 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L240-L247 type TotalFeatureImportanceStatistics struct { // Max The maximum importance value across all the training data for this feature. Max int `json:"max"` @@ -34,6 +44,74 @@ type TotalFeatureImportanceStatistics struct { Min int `json:"min"` } +func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Max = value + case float64: + f := int(v) + s.Max = f + } + + case "mean_magnitude": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.MeanMagnitude = f + case float64: + f := Float64(v) + s.MeanMagnitude = f + } + + case "min": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Min = value + case float64: + f := int(v) + s.Min = f + } + + } + } + return nil +} + // NewTotalFeatureImportanceStatistics returns a TotalFeatureImportanceStatistics. func NewTotalFeatureImportanceStatistics() *TotalFeatureImportanceStatistics { r := &TotalFeatureImportanceStatistics{} diff --git a/typedapi/types/totalhits.go b/typedapi/types/totalhits.go old mode 100755 new mode 100644 index 3dc7b325c0..4adf03c6db --- a/typedapi/types/totalhits.go +++ b/typedapi/types/totalhits.go @@ -15,23 +15,46 @@ // specific language governing permissions and limitations // under the License. -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e - package types import ( + "bytes" + "encoding/json" + "strconv" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/totalhitsrelation" ) // TotalHits type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/18d160a8583deec1bbef274d2c0e563a0cd20e2f/specification/_global/search/_types/hits.ts#L94-L97 type TotalHits struct { Relation totalhitsrelation.TotalHitsRelation `json:"relation"` Value int64 `json:"value"` } +// UnmarshalJSON implements Unmarshaler interface, it handles the shortcut for total hits. +func (t *TotalHits) UnmarshalJSON(data []byte) error { + type stub TotalHits + tmp := stub{} + dec := json.NewDecoder(bytes.NewReader(data)) + if _, err := strconv.Atoi(string(data)); err == nil { + err := dec.Decode(&t.Value) + if err != nil { + return err + } + t.Relation = totalhitsrelation.Eq + } else { + err := dec.Decode(&tmp) + if err != nil { + return err + } + *t = TotalHits(tmp) + } + + return nil +} + // NewTotalHits returns a TotalHits. func NewTotalHits() *TotalHits { r := &TotalHits{} diff --git a/typedapi/types/totalhits_unmarshaler.go b/typedapi/types/totalhits_unmarshaler.go deleted file mode 100644 index 1f8dc89246..0000000000 --- a/typedapi/types/totalhits_unmarshaler.go +++ /dev/null @@ -1,48 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package types - -import ( - "bytes" - "encoding/json" - "strconv" - - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/totalhitsrelation" -) - -// UnmarshalJSON implements Unmarshaler interface, it handles the shortcut for total hits. -func (t *TotalHits) UnmarshalJSON(data []byte) error { - type stub TotalHits - tmp := stub{} - dec := json.NewDecoder(bytes.NewReader(data)) - if _, err := strconv.Atoi(string(data)); err == nil { - err := dec.Decode(&t.Value) - if err != nil { - return err - } - t.Relation = totalhitsrelation.Eq - } else { - err := dec.Decode(&tmp) - if err != nil { - return err - } - *t = TotalHits(tmp) - } - - return nil -} diff --git a/typedapi/types/totaluserprofiles.go b/typedapi/types/totaluserprofiles.go old mode 100755 new mode 100644 index 10cdb71fb7..578305629b --- a/typedapi/types/totaluserprofiles.go +++ b/typedapi/types/totaluserprofiles.go @@ -16,18 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TotalUserProfiles type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/suggest_user_profiles/Response.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/suggest_user_profiles/Response.ts#L24-L27 type TotalUserProfiles struct { Relation string `json:"relation"` Value int64 `json:"value"` } +func (s *TotalUserProfiles) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return err + } + + case "value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Value = value + case float64: + f := int64(v) + s.Value = f + } + + } + } + return nil +} + // NewTotalUserProfiles returns a TotalUserProfiles. func NewTotalUserProfiles() *TotalUserProfiles { r := &TotalUserProfiles{} diff --git a/typedapi/types/trackhits.go b/typedapi/types/trackhits.go old mode 100755 new mode 100644 index 191b23d8b9..98c4fbff81 --- a/typedapi/types/trackhits.go +++ b/typedapi/types/trackhits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // bool // int // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/search/_types/hits.ts#L126-L134 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/search/_types/hits.ts#L126-L134 type TrackHits interface{} diff --git a/typedapi/types/trainedmodel.go b/typedapi/types/trainedmodel.go old mode 100755 new mode 100644 index 9ea6cf75c8..535ce62f61 --- a/typedapi/types/trainedmodel.go +++ b/typedapi/types/trainedmodel.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TrainedModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L60-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L60-L72 type TrainedModel struct { // Ensemble The definition for an ensemble model Ensemble *Ensemble `json:"ensemble,omitempty"` diff --git a/typedapi/types/trainedmodelassignment.go b/typedapi/types/trainedmodelassignment.go old mode 100755 new mode 100644 index 9985c62c13..6a2ccc2646 --- a/typedapi/types/trainedmodelassignment.go +++ b/typedapi/types/trainedmodelassignment.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/deploymentassignmentstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TrainedModelAssignment type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L387-L402 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L393-L408 type TrainedModelAssignment struct { // AssignmentState The overall assignment state. AssignmentState deploymentassignmentstate.DeploymentAssignmentState `json:"assignment_state"` @@ -38,6 +46,65 @@ type TrainedModelAssignment struct { TaskParameters TrainedModelAssignmentTaskParameters `json:"task_parameters"` } +func (s *TrainedModelAssignment) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "assignment_state": + if err := dec.Decode(&s.AssignmentState); err != nil { + return err + } + + case "max_assigned_allocations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxAssignedAllocations = &value + case float64: + f := int(v) + s.MaxAssignedAllocations = &f + } + + case "routing_table": + if s.RoutingTable == nil { + s.RoutingTable = make(map[string]TrainedModelAssignmentRoutingTable, 0) + } + if err := dec.Decode(&s.RoutingTable); err != nil { + return err + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + case "task_parameters": + if err := dec.Decode(&s.TaskParameters); err != nil { + return err + } + + } + } + return nil +} + // NewTrainedModelAssignment returns a TrainedModelAssignment. func NewTrainedModelAssignment() *TrainedModelAssignment { r := &TrainedModelAssignment{ diff --git a/typedapi/types/trainedmodelassignmentroutingtable.go b/typedapi/types/trainedmodelassignmentroutingtable.go old mode 100755 new mode 100644 index 661ea6a227..99499abae9 --- a/typedapi/types/trainedmodelassignmentroutingtable.go +++ b/typedapi/types/trainedmodelassignmentroutingtable.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/routingstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TrainedModelAssignmentRoutingTable type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L358-L376 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L364-L382 type TrainedModelAssignmentRoutingTable struct { // CurrentAllocations Current number of allocations. 
CurrentAllocations int `json:"current_allocations"` @@ -39,6 +47,71 @@ type TrainedModelAssignmentRoutingTable struct { TargetAllocations int `json:"target_allocations"` } +func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current_allocations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CurrentAllocations = value + case float64: + f := int(v) + s.CurrentAllocations = f + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = o + + case "routing_state": + if err := dec.Decode(&s.RoutingState); err != nil { + return err + } + + case "target_allocations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TargetAllocations = value + case float64: + f := int(v) + s.TargetAllocations = f + } + + } + } + return nil +} + // NewTrainedModelAssignmentRoutingTable returns a TrainedModelAssignmentRoutingTable. func NewTrainedModelAssignmentRoutingTable() *TrainedModelAssignmentRoutingTable { r := &TrainedModelAssignmentRoutingTable{} diff --git a/typedapi/types/trainedmodelassignmenttaskparameters.go b/typedapi/types/trainedmodelassignmenttaskparameters.go old mode 100755 new mode 100644 index b3115e7346..6a7fda2216 --- a/typedapi/types/trainedmodelassignmenttaskparameters.go +++ b/typedapi/types/trainedmodelassignmenttaskparameters.go @@ -16,20 +16,30 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/trainingpriority" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TrainedModelAssignmentTaskParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L305-L333 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L307-L339 type TrainedModelAssignmentTaskParameters struct { // CacheSize The size of the trained model cache. CacheSize ByteSize `json:"cache_size"` + // DeploymentId The unique identifier for the trained model deployment. + DeploymentId string `json:"deployment_id"` // ModelBytes The size of the trained model in bytes. ModelBytes int `json:"model_bytes"` // ModelId The unique identifier for the trained model. 
@@ -43,6 +53,110 @@ type TrainedModelAssignmentTaskParameters struct { ThreadsPerAllocation int `json:"threads_per_allocation"` } +func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_size": + if err := dec.Decode(&s.CacheSize); err != nil { + return err + } + + case "deployment_id": + if err := dec.Decode(&s.DeploymentId); err != nil { + return err + } + + case "model_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ModelBytes = value + case float64: + f := int(v) + s.ModelBytes = f + } + + case "model_id": + if err := dec.Decode(&s.ModelId); err != nil { + return err + } + + case "number_of_allocations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfAllocations = value + case float64: + f := int(v) + s.NumberOfAllocations = f + } + + case "priority": + if err := dec.Decode(&s.Priority); err != nil { + return err + } + + case "queue_capacity": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.QueueCapacity = value + case float64: + f := int(v) + s.QueueCapacity = f + } + + case "threads_per_allocation": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ThreadsPerAllocation = value + case float64: + f := int(v) + s.ThreadsPerAllocation = f + } + + } + } + return nil +} + // NewTrainedModelAssignmentTaskParameters returns a TrainedModelAssignmentTaskParameters. func NewTrainedModelAssignmentTaskParameters() *TrainedModelAssignmentTaskParameters { r := &TrainedModelAssignmentTaskParameters{} diff --git a/typedapi/types/trainedmodelconfig.go b/typedapi/types/trainedmodelconfig.go old mode 100755 new mode 100644 index ca0648df8f..544f892c36 --- a/typedapi/types/trainedmodelconfig.go +++ b/typedapi/types/trainedmodelconfig.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/trainedmodeltype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TrainedModelConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L157-L189 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L157-L191 type TrainedModelConfig struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` // CreateTime The time when the trained model was created. @@ -41,6 +49,8 @@ type TrainedModelConfig struct { EstimatedHeapMemoryUsageBytes *int `json:"estimated_heap_memory_usage_bytes,omitempty"` // EstimatedOperations The estimated number of operations to use the trained model. 
EstimatedOperations *int `json:"estimated_operations,omitempty"` + // FullyDefined True if the full model definition is present. + FullyDefined *bool `json:"fully_defined,omitempty"` // InferenceConfig The default configuration for inference. This can be either a regression, // classification, or one of the many NLP focused configurations. It must match // the underlying definition.trained_model's target_type. @@ -65,6 +75,162 @@ type TrainedModelConfig struct { Version *string `json:"version,omitempty"` } +func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "compressed_definition": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CompressedDefinition = &o + + case "create_time": + if err := dec.Decode(&s.CreateTime); err != nil { + return err + } + + case "created_by": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreatedBy = &o + + case "default_field_map": + if s.DefaultFieldMap == nil { + s.DefaultFieldMap = make(map[string]string, 0) + } + if err := dec.Decode(&s.DefaultFieldMap); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "estimated_heap_memory_usage_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.EstimatedHeapMemoryUsageBytes = &value + case float64: + f := int(v) + s.EstimatedHeapMemoryUsageBytes = &f + } + + case "estimated_operations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.EstimatedOperations = &value + case float64: + f := int(v) + s.EstimatedOperations = &f + } + + case "fully_defined": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.FullyDefined = &value + case bool: + s.FullyDefined = &v + } + + case "inference_config": + if err := dec.Decode(&s.InferenceConfig); err != nil { + return err + } + + case "input": + if err := dec.Decode(&s.Input); err != nil { + return err + } + + case "license_level": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LicenseLevel = &o + + case "location": + if err := dec.Decode(&s.Location); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "model_id": + if err := dec.Decode(&s.ModelId); err != nil { + return err + } + + case "model_size_bytes": + if err := dec.Decode(&s.ModelSizeBytes); err != nil { + return err + } + + case "model_type": + if err := dec.Decode(&s.ModelType); err != nil { + return err + } + + case "tags": + if err := dec.Decode(&s.Tags); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTrainedModelConfig returns a TrainedModelConfig. 
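// The hand-rolled UnmarshalJSON generated for TrainedModelConfig above lets
// numeric and boolean fields survive both quoted and unquoted JSON encodings.
// A minimal sketch of what that tolerance buys an in-package caller (the
// payload below is hypothetical, not taken from an Elasticsearch response):
//
//	var cfg TrainedModelConfig
//	raw := []byte(`{"estimated_operations":"1024","fully_defined":"true"}`)
//	if err := json.Unmarshal(raw, &cfg); err != nil {
//		// handle the error
//	}
//	// *cfg.EstimatedOperations == 1024 and *cfg.FullyDefined == true even
//	// though both values arrived as strings; unquoted 1024 / true decode the
//	// same way through the float64 / bool branches of the switch.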
func NewTrainedModelConfig() *TrainedModelConfig { r := &TrainedModelConfig{ diff --git a/typedapi/types/trainedmodelconfiginput.go b/typedapi/types/trainedmodelconfiginput.go old mode 100755 new mode 100644 index a34bb851bf..f5d2d829b8 --- a/typedapi/types/trainedmodelconfiginput.go +++ b/typedapi/types/trainedmodelconfiginput.go @@ -16,18 +16,51 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TrainedModelConfigInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L191-L194 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L193-L196 type TrainedModelConfigInput struct { // FieldNames An array of input field names for the model. FieldNames []string `json:"field_names"` } +func (s *TrainedModelConfigInput) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field_names": + if err := dec.Decode(&s.FieldNames); err != nil { + return err + } + + } + } + return nil +} + // NewTrainedModelConfigInput returns a TrainedModelConfigInput. func NewTrainedModelConfigInput() *TrainedModelConfigInput { r := &TrainedModelConfigInput{} diff --git a/typedapi/types/trainedmodelconfigmetadata.go b/typedapi/types/trainedmodelconfigmetadata.go old mode 100755 new mode 100644 index bdcc0df6bb..7f3c8602fa --- a/typedapi/types/trainedmodelconfigmetadata.go +++ b/typedapi/types/trainedmodelconfigmetadata.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TrainedModelConfigMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L196-L204 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L198-L206 type TrainedModelConfigMetadata struct { // FeatureImportanceBaseline An object that contains the baseline for feature importance values. For // regression analysis, it is a single value. For classification analysis, there diff --git a/typedapi/types/trainedmodeldeploymentallocationstatus.go b/typedapi/types/trainedmodeldeploymentallocationstatus.go old mode 100755 new mode 100644 index 2f2cd6302d..7f0e13a4e3 --- a/typedapi/types/trainedmodeldeploymentallocationstatus.go +++ b/typedapi/types/trainedmodeldeploymentallocationstatus.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/deploymentallocationstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TrainedModelDeploymentAllocationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L378-L385 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L384-L391 type TrainedModelDeploymentAllocationStatus struct { // AllocationCount The current number of nodes where the model is allocated. AllocationCount int `json:"allocation_count"` @@ -36,6 +44,63 @@ type TrainedModelDeploymentAllocationStatus struct { TargetAllocationCount int `json:"target_allocation_count"` } +func (s *TrainedModelDeploymentAllocationStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocation_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.AllocationCount = value + case float64: + f := int(v) + s.AllocationCount = f + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "target_allocation_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TargetAllocationCount = value + case float64: + f := int(v) + s.TargetAllocationCount = f + } + + } + } + return nil +} + // NewTrainedModelDeploymentAllocationStatus returns a TrainedModelDeploymentAllocationStatus. func NewTrainedModelDeploymentAllocationStatus() *TrainedModelDeploymentAllocationStatus { r := &TrainedModelDeploymentAllocationStatus{} diff --git a/typedapi/types/trainedmodeldeploymentnodesstats.go b/typedapi/types/trainedmodeldeploymentnodesstats.go old mode 100755 new mode 100644 index 601425d8d8..89583a1e0f --- a/typedapi/types/trainedmodeldeploymentnodesstats.go +++ b/typedapi/types/trainedmodeldeploymentnodesstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelDeploymentNodesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L128-L155 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L128-L155 type TrainedModelDeploymentNodesStats struct { // AverageInferenceTimeMs The average time for each inference call to complete on this node. 
AverageInferenceTimeMs Float64 `json:"average_inference_time_ms"` @@ -52,6 +62,173 @@ type TrainedModelDeploymentNodesStats struct { TimeoutCount int `json:"timeout_count"` } +func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "average_inference_time_ms": + if err := dec.Decode(&s.AverageInferenceTimeMs); err != nil { + return err + } + + case "error_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ErrorCount = value + case float64: + f := int(v) + s.ErrorCount = f + } + + case "inference_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InferenceCount = value + case float64: + f := int(v) + s.InferenceCount = f + } + + case "last_access": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LastAccess = value + case float64: + f := int64(v) + s.LastAccess = f + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "number_of_allocations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfAllocations = value + case float64: + f := int(v) + s.NumberOfAllocations = f + } + + case "number_of_pending_requests": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfPendingRequests = value + case float64: + f := int(v) + s.NumberOfPendingRequests = f + } + + case "rejection_execution_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RejectionExecutionCount = value + case float64: + f := int(v) + s.RejectionExecutionCount = f + } + + case "routing_state": + if err := dec.Decode(&s.RoutingState); err != nil { + return err + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + case "threads_per_allocation": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ThreadsPerAllocation = value + case float64: + f := int(v) + s.ThreadsPerAllocation = f + } + + case "timeout_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TimeoutCount = value + case float64: + f := int(v) + s.TimeoutCount = f + } + + } + } + return nil +} + // NewTrainedModelDeploymentNodesStats returns a TrainedModelDeploymentNodesStats. func NewTrainedModelDeploymentNodesStats() *TrainedModelDeploymentNodesStats { r := &TrainedModelDeploymentNodesStats{} diff --git a/typedapi/types/trainedmodeldeploymentstats.go b/typedapi/types/trainedmodeldeploymentstats.go old mode 100755 new mode 100644 index e9c6d67521..2c5cd2742b --- a/typedapi/types/trainedmodeldeploymentstats.go +++ b/typedapi/types/trainedmodeldeploymentstats.go @@ -16,17 +16,25 @@ // under the License. 
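// The same defensive pattern repeats in every decoder in this change-set: a
// numeric field is first decoded into an interface{}, then converted from
// either its string or its float64 form. Reduced to its essentials, and with
// an illustrative field name only (SomeCount is not a real field):
//
//	var tmp interface{}
//	dec.Decode(&tmp)
//	switch v := tmp.(type) {
//	case string:
//		value, err := strconv.Atoi(v)
//		if err != nil {
//			return err
//		}
//		s.SomeCount = value
//	case float64:
//		s.SomeCount = int(v)
//	}
//
// int64 and Float64 fields follow the same shape with strconv.ParseInt and
// strconv.ParseFloat in the string branch.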
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/deploymentstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // TrainedModelDeploymentStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L62-L97 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L62-L97 type TrainedModelDeploymentStats struct { // AllocationStatus The detailed allocation status for the deployment. AllocationStatus TrainedModelDeploymentAllocationStatus `json:"allocation_status"` @@ -62,6 +70,176 @@ type TrainedModelDeploymentStats struct { TimeoutCount int `json:"timeout_count"` } +func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocation_status": + if err := dec.Decode(&s.AllocationStatus); err != nil { + return err + } + + case "cache_size": + if err := dec.Decode(&s.CacheSize); err != nil { + return err + } + + case "error_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ErrorCount = value + case float64: + f := int(v) + s.ErrorCount = f + } + + case "inference_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InferenceCount = value + case float64: + f := int(v) + s.InferenceCount = f + } + + case "model_id": + if err := dec.Decode(&s.ModelId); err != nil { + return err + } + + case "nodes": + if err := dec.Decode(&s.Nodes); err != nil { + return err + } + + case "number_of_allocations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NumberOfAllocations = value + case float64: + f := int(v) + s.NumberOfAllocations = f + } + + case "queue_capacity": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.QueueCapacity = value + case float64: + f := int(v) + s.QueueCapacity = f + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = o + + case "rejected_execution_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RejectedExecutionCount = value + case float64: + f := int(v) + s.RejectedExecutionCount = f + } + + case "start_time": + if err := dec.Decode(&s.StartTime); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "threads_per_allocation": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + 
s.ThreadsPerAllocation = value + case float64: + f := int(v) + s.ThreadsPerAllocation = f + } + + case "timeout_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.TimeoutCount = value + case float64: + f := int(v) + s.TimeoutCount = f + } + + } + } + return nil +} + // NewTrainedModelDeploymentStats returns a TrainedModelDeploymentStats. func NewTrainedModelDeploymentStats() *TrainedModelDeploymentStats { r := &TrainedModelDeploymentStats{} diff --git a/typedapi/types/trainedmodelentities.go b/typedapi/types/trainedmodelentities.go old mode 100755 new mode 100644 index e0ea605fa3..75881a6232 --- a/typedapi/types/trainedmodelentities.go +++ b/typedapi/types/trainedmodelentities.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelEntities type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L392-L398 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L392-L398 type TrainedModelEntities struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -31,6 +41,90 @@ type TrainedModelEntities struct { StartPos int `json:"start_pos"` } +func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClassName = o + + case "class_probability": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ClassProbability = f + case float64: + f := Float64(v) + s.ClassProbability = f + } + + case "end_pos": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.EndPos = value + case float64: + f := int(v) + s.EndPos = f + } + + case "entity": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Entity = o + + case "start_pos": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.StartPos = value + case float64: + f := int(v) + s.StartPos = f + } + + } + } + return nil +} + // NewTrainedModelEntities returns a TrainedModelEntities. 
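// As with the integer fields above, Float64-typed fields such as
// class_probability are accepted either as JSON numbers or as numeric
// strings. A small hypothetical illustration, assuming an in-package caller:
//
//	var e TrainedModelEntities
//	raw := []byte(`{"class_probability":"0.87","start_pos":3,"end_pos":7}`)
//	if err := json.Unmarshal(raw, &e); err != nil {
//		// handle the error
//	}
//	// e.ClassProbability == Float64(0.87), e.StartPos == 3, e.EndPos == 7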
func NewTrainedModelEntities() *TrainedModelEntities { r := &TrainedModelEntities{} diff --git a/typedapi/types/trainedmodelinferenceclassimportance.go b/typedapi/types/trainedmodelinferenceclassimportance.go old mode 100755 new mode 100644 index 5888e5c430..8036ca53db --- a/typedapi/types/trainedmodelinferenceclassimportance.go +++ b/typedapi/types/trainedmodelinferenceclassimportance.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelInferenceClassImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L405-L408 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L405-L408 type TrainedModelInferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` } +func (s *TrainedModelInferenceClassImportance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "class_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ClassName = o + + case "importance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Importance = f + case float64: + f := Float64(v) + s.Importance = f + } + + } + } + return nil +} + // NewTrainedModelInferenceClassImportance returns a TrainedModelInferenceClassImportance. func NewTrainedModelInferenceClassImportance() *TrainedModelInferenceClassImportance { r := &TrainedModelInferenceClassImportance{} diff --git a/typedapi/types/trainedmodelinferencefeatureimportance.go b/typedapi/types/trainedmodelinferencefeatureimportance.go old mode 100755 new mode 100644 index 7ddc59519a..96e219dede --- a/typedapi/types/trainedmodelinferencefeatureimportance.go +++ b/typedapi/types/trainedmodelinferencefeatureimportance.go @@ -16,19 +16,78 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelInferenceFeatureImportance type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L410-L414 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L410-L414 type TrainedModelInferenceFeatureImportance struct { Classes []TrainedModelInferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` Importance *Float64 `json:"importance,omitempty"` } +func (s *TrainedModelInferenceFeatureImportance) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classes": + if err := dec.Decode(&s.Classes); err != nil { + return err + } + + case "feature_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.FeatureName = o + + case "importance": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Importance = &f + case float64: + f := Float64(v) + s.Importance = &f + } + + } + } + return nil +} + // NewTrainedModelInferenceFeatureImportance returns a TrainedModelInferenceFeatureImportance. func NewTrainedModelInferenceFeatureImportance() *TrainedModelInferenceFeatureImportance { r := &TrainedModelInferenceFeatureImportance{} diff --git a/typedapi/types/trainedmodelinferencestats.go b/typedapi/types/trainedmodelinferencestats.go old mode 100755 new mode 100644 index 62d088a1f8..69f42d2937 --- a/typedapi/types/trainedmodelinferencestats.go +++ b/typedapi/types/trainedmodelinferencestats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelInferenceStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L99-L119 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L99-L119 type TrainedModelInferenceStats struct { // CacheMissCount The number of times the model was loaded for inference and was not retrieved // from the cache. 
@@ -43,6 +53,95 @@ type TrainedModelInferenceStats struct { Timestamp DateTime `json:"timestamp"` } +func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "cache_miss_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.CacheMissCount = value + case float64: + f := int(v) + s.CacheMissCount = f + } + + case "failure_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FailureCount = value + case float64: + f := int(v) + s.FailureCount = f + } + + case "inference_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InferenceCount = value + case float64: + f := int(v) + s.InferenceCount = f + } + + case "missing_all_fields_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MissingAllFieldsCount = value + case float64: + f := int(v) + s.MissingAllFieldsCount = f + } + + case "timestamp": + if err := dec.Decode(&s.Timestamp); err != nil { + return err + } + + } + } + return nil +} + // NewTrainedModelInferenceStats returns a TrainedModelInferenceStats. func NewTrainedModelInferenceStats() *TrainedModelInferenceStats { r := &TrainedModelInferenceStats{} diff --git a/typedapi/types/trainedmodellocation.go b/typedapi/types/trainedmodellocation.go old mode 100755 new mode 100644 index 58edb41d71..ecc0d9080c --- a/typedapi/types/trainedmodellocation.go +++ b/typedapi/types/trainedmodellocation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TrainedModelLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L404-L406 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L410-L412 type TrainedModelLocation struct { Index TrainedModelLocationIndex `json:"index"` } diff --git a/typedapi/types/trainedmodellocationindex.go b/typedapi/types/trainedmodellocationindex.go old mode 100755 new mode 100644 index 35d02410b4..f21b6bceac --- a/typedapi/types/trainedmodellocationindex.go +++ b/typedapi/types/trainedmodellocationindex.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TrainedModelLocationIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L408-L410 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L414-L416 type TrainedModelLocationIndex struct { Name string `json:"name"` } +func (s *TrainedModelLocationIndex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + } + } + return nil +} + // NewTrainedModelLocationIndex returns a TrainedModelLocationIndex. func NewTrainedModelLocationIndex() *TrainedModelLocationIndex { r := &TrainedModelLocationIndex{} diff --git a/typedapi/types/trainedmodelsizestats.go b/typedapi/types/trainedmodelsizestats.go old mode 100755 new mode 100644 index f9e9d7bbac..a6b82a1d43 --- a/typedapi/types/trainedmodelsizestats.go +++ b/typedapi/types/trainedmodelsizestats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelSizeStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L121-L126 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L121-L126 type TrainedModelSizeStats struct { // ModelSizeBytes The size of the model in bytes. ModelSizeBytes ByteSize `json:"model_size_bytes"` @@ -30,6 +40,47 @@ type TrainedModelSizeStats struct { RequiredNativeMemoryBytes int `json:"required_native_memory_bytes"` } +func (s *TrainedModelSizeStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "model_size_bytes": + if err := dec.Decode(&s.ModelSizeBytes); err != nil { + return err + } + + case "required_native_memory_bytes": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.RequiredNativeMemoryBytes = value + case float64: + f := int(v) + s.RequiredNativeMemoryBytes = f + } + + } + } + return nil +} + // NewTrainedModelSizeStats returns a TrainedModelSizeStats. func NewTrainedModelSizeStats() *TrainedModelSizeStats { r := &TrainedModelSizeStats{} diff --git a/typedapi/types/trainedmodelsrecord.go b/typedapi/types/trainedmodelsrecord.go old mode 100755 new mode 100644 index 34806cc306..19f9d718d0 --- a/typedapi/types/trainedmodelsrecord.go +++ b/typedapi/types/trainedmodelsrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TrainedModelsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/ml_trained_models/types.ts#L23-L111 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/ml_trained_models/types.ts#L23-L111 type TrainedModelsRecord struct { // CreateTime The time the model was created CreateTime DateTime `json:"create_time,omitempty"` @@ -62,6 +70,158 @@ type TrainedModelsRecord struct { Version *string `json:"version,omitempty"` } +func (s *TrainedModelsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "create_time", "ct": + if err := dec.Decode(&s.CreateTime); err != nil { + return err + } + + case "created_by", "c", "createdBy": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreatedBy = &o + + case "data_frame.analysis", "dfa", "dataFrameAnalyticsAnalysis": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataFrameAnalysis = &o + + case "data_frame.create_time", "dft", "dataFrameAnalyticsTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataFrameCreateTime = &o + + case "data_frame.id", "dfid", "dataFrameAnalytics": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataFrameId = &o + + case "data_frame.source_index", "dfsi", "dataFrameAnalyticsSrcIndex": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DataFrameSourceIndex = &o + + case "description", "d": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "heap_size", "hs", "modelHeapSize": + if err := dec.Decode(&s.HeapSize); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "ingest.count", "ic", "ingestCount": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IngestCount = &o + + case "ingest.current", "icurr", "ingestCurrent": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IngestCurrent = &o + + case "ingest.failed", "if", "ingestFailed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IngestFailed = &o + + case "ingest.pipelines", "ip", "ingestPipelines": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IngestPipelines = &o + + case "ingest.time", "it", "ingestTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IngestTime = &o + + case "license", "l": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.License = &o + + case 
"operations", "o", "modelOperations": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Operations = &o + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTrainedModelsRecord returns a TrainedModelsRecord. func NewTrainedModelsRecord() *TrainedModelsRecord { r := &TrainedModelsRecord{} diff --git a/typedapi/types/trainedmodelstats.go b/typedapi/types/trainedmodelstats.go old mode 100755 new mode 100644 index 178a6376aa..86a6820153 --- a/typedapi/types/trainedmodelstats.go +++ b/typedapi/types/trainedmodelstats.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // TrainedModelStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/TrainedModel.ts#L42-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/TrainedModel.ts#L42-L60 type TrainedModelStats struct { // DeploymentStats A collection of deployment stats, which is present when the models are // deployed. @@ -45,6 +51,70 @@ type TrainedModelStats struct { PipelineCount int `json:"pipeline_count"` } +func (s *TrainedModelStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "deployment_stats": + if err := dec.Decode(&s.DeploymentStats); err != nil { + return err + } + + case "inference_stats": + if err := dec.Decode(&s.InferenceStats); err != nil { + return err + } + + case "ingest": + if s.Ingest == nil { + s.Ingest = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Ingest); err != nil { + return err + } + + case "model_id": + if err := dec.Decode(&s.ModelId); err != nil { + return err + } + + case "model_size_stats": + if err := dec.Decode(&s.ModelSizeStats); err != nil { + return err + } + + case "pipeline_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.PipelineCount = value + case float64: + f := int(v) + s.PipelineCount = f + } + + } + } + return nil +} + // NewTrainedModelStats returns a TrainedModelStats. func NewTrainedModelStats() *TrainedModelStats { r := &TrainedModelStats{ diff --git a/typedapi/types/trainedmodeltree.go b/typedapi/types/trainedmodeltree.go old mode 100755 new mode 100644 index 29ada0bd4b..e49aa39949 --- a/typedapi/types/trainedmodeltree.go +++ b/typedapi/types/trainedmodeltree.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TrainedModelTree type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L74-L79 type TrainedModelTree struct { ClassificationLabels []string `json:"classification_labels,omitempty"` FeatureNames []string `json:"feature_names"` diff --git a/typedapi/types/trainedmodeltreenode.go b/typedapi/types/trainedmodeltreenode.go old mode 100755 new mode 100644 index 3cf2700961..3c03bd7734 --- a/typedapi/types/trainedmodeltreenode.go +++ b/typedapi/types/trainedmodeltreenode.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrainedModelTreeNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L81-L91 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L81-L91 type TrainedModelTreeNode struct { DecisionType *string `json:"decision_type,omitempty"` DefaultLeft *bool `json:"default_left,omitempty"` @@ -35,6 +45,160 @@ type TrainedModelTreeNode struct { Threshold *Float64 `json:"threshold,omitempty"` } +func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decision_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DecisionType = &o + + case "default_left": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DefaultLeft = &value + case bool: + s.DefaultLeft = &v + } + + case "leaf_value": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.LeafValue = &f + case float64: + f := Float64(v) + s.LeafValue = &f + } + + case "left_child": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.LeftChild = &value + case float64: + f := int(v) + s.LeftChild = &f + } + + case "node_index": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.NodeIndex = value + case float64: + f := int(v) + s.NodeIndex = f + } + + case "right_child": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + 
} + s.RightChild = &value + case float64: + f := int(v) + s.RightChild = &f + } + + case "split_feature": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SplitFeature = &value + case float64: + f := int(v) + s.SplitFeature = &f + } + + case "split_gain": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SplitGain = &value + case float64: + f := int(v) + s.SplitGain = &f + } + + case "threshold": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Threshold = &f + case float64: + f := Float64(v) + s.Threshold = &f + } + + } + } + return nil +} + // NewTrainedModelTreeNode returns a TrainedModelTreeNode. func NewTrainedModelTreeNode() *TrainedModelTreeNode { r := &TrainedModelTreeNode{} diff --git a/typedapi/types/transformauthorization.go b/typedapi/types/transformauthorization.go old mode 100755 new mode 100644 index 15903bb9dd..90b6b9ebe5 --- a/typedapi/types/transformauthorization.go +++ b/typedapi/types/transformauthorization.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TransformAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/Authorization.ts#L59-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/Authorization.ts#L59-L71 type TransformAuthorization struct { // ApiKey If an API key was used for the most recent update to the transform, its name // and identifier are listed in the response. diff --git a/typedapi/types/transformcontainer.go b/typedapi/types/transformcontainer.go old mode 100755 new mode 100644 index cc4ac6fcbc..3975433a5b --- a/typedapi/types/transformcontainer.go +++ b/typedapi/types/transformcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TransformContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Transform.ts#L27-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Transform.ts#L27-L34 type TransformContainer struct { Chain []TransformContainer `json:"chain,omitempty"` Script *ScriptTransform `json:"script,omitempty"` diff --git a/typedapi/types/transformdestination.go b/typedapi/types/transformdestination.go old mode 100755 new mode 100644 index 2a0f0dd1bd..95e75196d3 --- a/typedapi/types/transformdestination.go +++ b/typedapi/types/transformdestination.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TransformDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L34-L45 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L34-L45 type TransformDestination struct { // Index The destination index for the transform. The mappings of the destination // index are deduced based on the source @@ -34,6 +42,39 @@ type TransformDestination struct { Pipeline *string `json:"pipeline,omitempty"` } +func (s *TransformDestination) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "pipeline": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pipeline = &o + + } + } + return nil +} + // NewTransformDestination returns a TransformDestination. func NewTransformDestination() *TransformDestination { r := &TransformDestination{} diff --git a/typedapi/types/transformindexerstats.go b/typedapi/types/transformindexerstats.go old mode 100755 new mode 100644 index f8d5b0f96b..04154d0241 --- a/typedapi/types/transformindexerstats.go +++ b/typedapi/types/transformindexerstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TransformIndexerStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/types.ts#L53-L71 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/types.ts#L53-L71 type TransformIndexerStats struct { DeleteTimeInMs *int64 `json:"delete_time_in_ms,omitempty"` DocumentsDeleted *int64 `json:"documents_deleted,omitempty"` @@ -43,6 +53,233 @@ type TransformIndexerStats struct { TriggerCount int64 `json:"trigger_count"` } +func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "delete_time_in_ms": + if err := dec.Decode(&s.DeleteTimeInMs); err != nil { + return err + } + + case "documents_deleted": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocumentsDeleted = &value + case float64: + f := int64(v) + s.DocumentsDeleted = &f + } + + case "documents_indexed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocumentsIndexed = value + case float64: + f := int64(v) + s.DocumentsIndexed = f + } + + case "documents_processed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocumentsProcessed = value + case float64: + f := int64(v) + s.DocumentsProcessed = f + } + + case "exponential_avg_checkpoint_duration_ms": + if err := dec.Decode(&s.ExponentialAvgCheckpointDurationMs); err != nil { + return err + } + + case "exponential_avg_documents_indexed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ExponentialAvgDocumentsIndexed = f + case float64: + f := Float64(v) + s.ExponentialAvgDocumentsIndexed = f + } + + case "exponential_avg_documents_processed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.ExponentialAvgDocumentsProcessed = f + case float64: + f := Float64(v) + s.ExponentialAvgDocumentsProcessed = f + } + + case "index_failures": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexFailures = value + case float64: + f := int64(v) + s.IndexFailures = f + } + + case "index_time_in_ms": + if err := dec.Decode(&s.IndexTimeInMs); err != nil { + return err + } + + case "index_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.IndexTotal = value + case float64: + f := int64(v) + s.IndexTotal = f + } + + case "pages_processed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PagesProcessed = value + case float64: + f := int64(v) + s.PagesProcessed = f + } + + case 
"processing_time_in_ms": + if err := dec.Decode(&s.ProcessingTimeInMs); err != nil { + return err + } + + case "processing_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ProcessingTotal = value + case float64: + f := int64(v) + s.ProcessingTotal = f + } + + case "search_failures": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SearchFailures = value + case float64: + f := int64(v) + s.SearchFailures = f + } + + case "search_time_in_ms": + if err := dec.Decode(&s.SearchTimeInMs); err != nil { + return err + } + + case "search_total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SearchTotal = value + case float64: + f := int64(v) + s.SearchTotal = f + } + + case "trigger_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TriggerCount = value + case float64: + f := int64(v) + s.TriggerCount = f + } + + } + } + return nil +} + // NewTransformIndexerStats returns a TransformIndexerStats. func NewTransformIndexerStats() *TransformIndexerStats { r := &TransformIndexerStats{} diff --git a/typedapi/types/transformprogress.go b/typedapi/types/transformprogress.go old mode 100755 new mode 100644 index 6edc0ab769..163bafde59 --- a/typedapi/types/transformprogress.go +++ b/typedapi/types/transformprogress.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TransformProgress type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/types.ts#L45-L51 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/types.ts#L45-L51 type TransformProgress struct { DocsIndexed int64 `json:"docs_indexed"` DocsProcessed int64 `json:"docs_processed"` @@ -31,6 +41,102 @@ type TransformProgress struct { TotalDocs int64 `json:"total_docs"` } +func (s *TransformProgress) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "docs_indexed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocsIndexed = value + case float64: + f := int64(v) + s.DocsIndexed = f + } + + case "docs_processed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocsProcessed = value + case float64: + f := int64(v) + s.DocsProcessed = f + } + + case "docs_remaining": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocsRemaining = value + case float64: + f := int64(v) + s.DocsRemaining = f + } + + case "percent_complete": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.PercentComplete = f + case float64: + f := Float64(v) + s.PercentComplete = f + } + + case "total_docs": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalDocs = value + case float64: + f := int64(v) + s.TotalDocs = f + } + + } + } + return nil +} + // NewTransformProgress returns a TransformProgress. func NewTransformProgress() *TransformProgress { r := &TransformProgress{} diff --git a/typedapi/types/transformsource.go b/typedapi/types/transformsource.go old mode 100755 new mode 100644 index 401dc3002e..d9798bbb39 --- a/typedapi/types/transformsource.go +++ b/typedapi/types/transformsource.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TransformSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/_types/Transform.ts#L145-L163 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/_types/Transform.ts#L145-L163 type TransformSource struct { // Index The source indices for the transform. 
It can be a single index, an index // pattern (for example, `"my-index-*""`), an @@ -38,7 +46,53 @@ type TransformSource struct { // RuntimeMappings Definitions of search-time runtime fields that can be used by the transform. // For search runtime fields all data // nodes, including remote nodes, must be 7.12 or later. - RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` + RuntimeMappings RuntimeFields `json:"runtime_mappings,omitempty"` +} + +func (s *TransformSource) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Index = append(s.Index, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Index); err != nil { + return err + } + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + case "runtime_mappings": + if err := dec.Decode(&s.RuntimeMappings); err != nil { + return err + } + + } + } + return nil } // NewTransformSource returns a TransformSource. diff --git a/typedapi/types/transformsrecord.go b/typedapi/types/transformsrecord.go old mode 100755 new mode 100644 index fcd82e9847..91490183d5 --- a/typedapi/types/transformsrecord.go +++ b/typedapi/types/transformsrecord.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TransformsRecord type. 
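The "index" handling in TransformSource also shows how string-or-array fields are decoded: the raw message is inspected, a bare string is appended as a one-element slice, and a JSON array is decoded directly. A minimal sketch of the same behaviour with a hypothetical indices type:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// indices is a hypothetical type that accepts either a single JSON string or
// an array of strings, mirroring the "index" case in the generated decoder.
type indices []string

func (i *indices) UnmarshalJSON(data []byte) error {
	raw := bytes.TrimSpace(data)
	if !bytes.HasPrefix(raw, []byte("[")) {
		var one string
		if err := json.Unmarshal(raw, &one); err != nil {
			return err
		}
		*i = append(*i, one)
		return nil
	}
	// An array decodes straight into the underlying []string; converting the
	// pointer type avoids re-entering this method.
	return json.Unmarshal(raw, (*[]string)(i))
}

func main() {
	var a, b indices
	_ = json.Unmarshal([]byte(`"my-index"`), &a)
	_ = json.Unmarshal([]byte(`["idx-1", "idx-2"]`), &b)
	fmt.Println(a, b) // [my-index] [idx-1 idx-2]
}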
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cat/transforms/types.ts#L22-L187 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cat/transforms/types.ts#L22-L187 type TransformsRecord struct { // ChangesLastDetectionTime changes last detected time ChangesLastDetectionTime string `json:"changes_last_detection_time,omitempty"` @@ -92,6 +100,284 @@ type TransformsRecord struct { Version *string `json:"version,omitempty"` } +func (s *TransformsRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "changes_last_detection_time", "cldt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ChangesLastDetectionTime = o + + case "checkpoint", "c": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Checkpoint = &o + + case "checkpoint_duration_time_exp_avg", "cdtea", "checkpointTimeExpAvg": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CheckpointDurationTimeExpAvg = &o + + case "checkpoint_progress", "cp", "checkpointProgress": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CheckpointProgress = o + + case "create_time", "ct", "createTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.CreateTime = &o + + case "delete_time", "dtime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DeleteTime = &o + + case "description", "d": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "dest_index", "di", "destIndex": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DestIndex = &o + + case "docs_per_second", "dps": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocsPerSecond = &o + + case "documents_deleted", "docd": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocumentsDeleted = &o + + case "documents_indexed", "doci": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocumentsIndexed = &o + + case "documents_processed", "docp", "documentsProcessed": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.DocumentsProcessed = &o + + case "frequency", "f": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Frequency = &o + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "index_failure", "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexFailure = &o + + case "index_time", "itime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexTime = &o + + case "index_total", "it": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return 
err + } + o := string(tmp) + s.IndexTotal = &o + + case "indexed_documents_exp_avg", "idea": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.IndexedDocumentsExpAvg = &o + + case "last_search_time", "lst", "lastSearchTime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LastSearchTime = o + + case "max_page_search_size", "mpsz": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.MaxPageSearchSize = &o + + case "pages_processed", "pp": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.PagesProcessed = &o + + case "pipeline", "p": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Pipeline = &o + + case "processed_documents_exp_avg", "pdea": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ProcessedDocumentsExpAvg = &o + + case "processing_time", "pt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ProcessingTime = &o + + case "reason", "r": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = &o + + case "search_failure", "sf": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchFailure = &o + + case "search_time", "stime": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchTime = &o + + case "search_total", "st": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SearchTotal = &o + + case "source_index", "si", "sourceIndex": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.SourceIndex = &o + + case "state", "s": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = &o + + case "transform_type", "tt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TransformType = &o + + case "trigger_count", "tc": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TriggerCount = &o + + case "version", "v": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTransformsRecord returns a TransformsRecord. func NewTransformsRecord() *TransformsRecord { r := &TransformsRecord{} diff --git a/typedapi/types/transformstats.go b/typedapi/types/transformstats.go old mode 100755 new mode 100644 index fe6c931a6a..0b3cc0dbc0 --- a/typedapi/types/transformstats.go +++ b/typedapi/types/transformstats.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TransformStats type. 
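TransformsRecord is a cat API row, so each switch case lists every column name the server may use for the same value: the full name, the short alias, and, where one exists, the camelCase alias (for example "documents_processed", "docp", "documentsProcessed"). A reduced sketch of that alias handling, with a hypothetical catRow type:

package main

import (
	"encoding/json"
	"fmt"
)

// catRow is a hypothetical stand-in for a generated cat record.
type catRow struct {
	DocumentsProcessed string
}

func (r *catRow) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	for key, msg := range raw {
		switch key {
		// Several JSON keys map onto the same struct field.
		case "documents_processed", "docp", "documentsProcessed":
			var v string
			if err := json.Unmarshal(msg, &v); err != nil {
				return err
			}
			r.DocumentsProcessed = v
		}
	}
	return nil
}

func main() {
	var a, b catRow
	_ = json.Unmarshal([]byte(`{"docp":"120"}`), &a)
	_ = json.Unmarshal([]byte(`{"documents_processed":"120"}`), &b)
	fmt.Println(a.DocumentsProcessed, b.DocumentsProcessed) // 120 120
}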
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/types.ts#L31-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/types.ts#L31-L39 type TransformStats struct { Checkpointing Checkpointing `json:"checkpointing"` Health *TransformStatsHealth `json:"health,omitempty"` @@ -33,6 +41,67 @@ type TransformStats struct { Stats TransformIndexerStats `json:"stats"` } +func (s *TransformStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "checkpointing": + if err := dec.Decode(&s.Checkpointing); err != nil { + return err + } + + case "health": + if err := dec.Decode(&s.Health); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "node": + if err := dec.Decode(&s.Node); err != nil { + return err + } + + case "reason": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Reason = &o + + case "state": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.State = o + + case "stats": + if err := dec.Decode(&s.Stats); err != nil { + return err + } + + } + } + return nil +} + // NewTransformStats returns a TransformStats. func NewTransformStats() *TransformStats { r := &TransformStats{} diff --git a/typedapi/types/transformstatshealth.go b/typedapi/types/transformstatshealth.go old mode 100755 new mode 100644 index be98eb347c..37e39b1eb6 --- a/typedapi/types/transformstatshealth.go +++ b/typedapi/types/transformstatshealth.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // TransformStatsHealth type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform_stats/types.ts#L41-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform_stats/types.ts#L41-L43 type TransformStatsHealth struct { Status healthstatus.HealthStatus `json:"status"` } diff --git a/typedapi/types/transformsummary.go b/typedapi/types/transformsummary.go old mode 100755 new mode 100644 index 3495db75c3..a2319ecbe6 --- a/typedapi/types/transformsummary.go +++ b/typedapi/types/transformsummary.go @@ -16,17 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // TransformSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/transform/get_transform/types.ts#L33-L61 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/transform/get_transform/types.ts#L33-L61 type TransformSummary struct { // Authorization The security privileges that the transform uses to run its queries. If // Elastic Stack security features were disabled at the time of the most recent @@ -37,11 +41,11 @@ type TransformSummary struct { // Description Free text description of the transform. Description *string `json:"description,omitempty"` // Dest The destination for the transform. - Dest ReindexDestination `json:"dest"` - Frequency Duration `json:"frequency,omitempty"` - Id string `json:"id"` - Latest *Latest `json:"latest,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Dest ReindexDestination `json:"dest"` + Frequency Duration `json:"frequency,omitempty"` + Id string `json:"id"` + Latest *Latest `json:"latest,omitempty"` + Meta_ Metadata `json:"_meta,omitempty"` // Pivot The pivot method transforms the data by aggregating and grouping it. Pivot *Pivot `json:"pivot,omitempty"` RetentionPolicy *RetentionPolicyContainer `json:"retention_policy,omitempty"` @@ -56,6 +60,99 @@ type TransformSummary struct { Version *string `json:"version,omitempty"` } +func (s *TransformSummary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "authorization": + if err := dec.Decode(&s.Authorization); err != nil { + return err + } + + case "create_time": + if err := dec.Decode(&s.CreateTime); err != nil { + return err + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "dest": + if err := dec.Decode(&s.Dest); err != nil { + return err + } + + case "frequency": + if err := dec.Decode(&s.Frequency); err != nil { + return err + } + + case "id": + if err := dec.Decode(&s.Id); err != nil { + return err + } + + case "latest": + if err := dec.Decode(&s.Latest); err != nil { + return err + } + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return err + } + + case "pivot": + if err := dec.Decode(&s.Pivot); err != nil { + return err + } + + case "retention_policy": + if err := dec.Decode(&s.RetentionPolicy); err != nil { + return err + } + + case "settings": + if err := dec.Decode(&s.Settings); err != nil { + return err + } + + case "source": + if err := dec.Decode(&s.Source); err != nil { + return err + } + + case "sync": + if err := dec.Decode(&s.Sync); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTransformSummary returns a TransformSummary. func NewTransformSummary() *TransformSummary { r := &TransformSummary{} diff --git a/typedapi/types/transientmetadataconfig.go b/typedapi/types/transientmetadataconfig.go old mode 100755 new mode 100644 index 0f6d69ab18..fcb19e4e73 --- a/typedapi/types/transientmetadataconfig.go +++ b/typedapi/types/transientmetadataconfig.go @@ -16,17 +16,61 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TransientMetadataConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/TransientMetadataConfig.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/TransientMetadataConfig.ts#L20-L22 type TransientMetadataConfig struct { Enabled bool `json:"enabled"` } +func (s *TransientMetadataConfig) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + } + } + return nil +} + // NewTransientMetadataConfig returns a TransientMetadataConfig. func NewTransientMetadataConfig() *TransientMetadataConfig { r := &TransientMetadataConfig{} diff --git a/typedapi/types/translog.go b/typedapi/types/translog.go old mode 100755 new mode 100644 index 06a91d068d..11067d320c --- a/typedapi/types/translog.go +++ b/typedapi/types/translog.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/translogdurability" + + "bytes" + "errors" + "io" + + "encoding/json" ) // Translog type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L332-L354 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L332-L354 type Translog struct { // Durability Whether or not to `fsync` and commit the translog after every index, delete, // update, or bulk request. @@ -49,6 +55,46 @@ type Translog struct { SyncInterval Duration `json:"sync_interval,omitempty"` } +func (s *Translog) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "durability": + if err := dec.Decode(&s.Durability); err != nil { + return err + } + + case "flush_threshold_size": + if err := dec.Decode(&s.FlushThresholdSize); err != nil { + return err + } + + case "retention": + if err := dec.Decode(&s.Retention); err != nil { + return err + } + + case "sync_interval": + if err := dec.Decode(&s.SyncInterval); err != nil { + return err + } + + } + } + return nil +} + // NewTranslog returns a Translog. 
func NewTranslog() *Translog { r := &Translog{} diff --git a/typedapi/types/translogretention.go b/typedapi/types/translogretention.go old mode 100755 new mode 100644 index 05f25a9b13..2b71c66bf4 --- a/typedapi/types/translogretention.go +++ b/typedapi/types/translogretention.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TranslogRetention type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/_types/IndexSettings.ts#L373-L392 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/_types/IndexSettings.ts#L373-L392 type TranslogRetention struct { // Age This controls the maximum duration for which translog files are kept by each // shard. Keeping more @@ -46,6 +54,36 @@ type TranslogRetention struct { Size ByteSize `json:"size,omitempty"` } +func (s *TranslogRetention) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "age": + if err := dec.Decode(&s.Age); err != nil { + return err + } + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + } + } + return nil +} + // NewTranslogRetention returns a TranslogRetention. func NewTranslogRetention() *TranslogRetention { r := &TranslogRetention{} diff --git a/typedapi/types/translogstats.go b/typedapi/types/translogstats.go old mode 100755 new mode 100644 index ac36d076dd..802406244e --- a/typedapi/types/translogstats.go +++ b/typedapi/types/translogstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TranslogStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L242-L250 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L242-L250 type TranslogStats struct { EarliestLastModifiedAge int64 `json:"earliest_last_modified_age"` Operations int64 `json:"operations"` @@ -33,6 +43,118 @@ type TranslogStats struct { UncommittedSizeInBytes int64 `json:"uncommitted_size_in_bytes"` } +func (s *TranslogStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "earliest_last_modified_age": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.EarliestLastModifiedAge = value + case float64: + f := int64(v) + s.EarliestLastModifiedAge = f + } + + case "operations": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Operations = value + case float64: + f := int64(v) + s.Operations = f + } + + case "size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Size = &o + + case "size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SizeInBytes = value + case float64: + f := int64(v) + s.SizeInBytes = f + } + + case "uncommitted_operations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.UncommittedOperations = value + case float64: + f := int(v) + s.UncommittedOperations = f + } + + case "uncommitted_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.UncommittedSize = &o + + case "uncommitted_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.UncommittedSizeInBytes = value + case float64: + f := int64(v) + s.UncommittedSizeInBytes = f + } + + } + } + return nil +} + // NewTranslogStats returns a TranslogStats. func NewTranslogStats() *TranslogStats { r := &TranslogStats{} diff --git a/typedapi/types/translogstatus.go b/typedapi/types/translogstatus.go old mode 100755 new mode 100644 index 96f3883982..29d1af947a --- a/typedapi/types/translogstatus.go +++ b/typedapi/types/translogstatus.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TranslogStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L102-L109 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L102-L109 type TranslogStatus struct { Percent Percentage `json:"percent"` Recovered int64 `json:"recovered"` @@ -32,6 +42,86 @@ type TranslogStatus struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *TranslogStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "percent": + if err := dec.Decode(&s.Percent); err != nil { + return err + } + + case "recovered": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Recovered = value + case float64: + f := int64(v) + s.Recovered = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "total_on_start": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalOnStart = value + case float64: + f := int64(v) + s.TotalOnStart = f + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewTranslogStatus returns a TranslogStatus. func NewTranslogStatus() *TranslogStatus { r := &TranslogStatus{} diff --git a/typedapi/types/transport.go b/typedapi/types/transport.go old mode 100755 new mode 100644 index 8187770c7d..430174f82a --- a/typedapi/types/transport.go +++ b/typedapi/types/transport.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Transport type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L420-L431 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L420-L431 type Transport struct { InboundHandlingTimeHistogram []TransportHistogram `json:"inbound_handling_time_histogram,omitempty"` OutboundHandlingTimeHistogram []TransportHistogram `json:"outbound_handling_time_histogram,omitempty"` @@ -36,6 +46,143 @@ type Transport struct { TxSizeInBytes *int64 `json:"tx_size_in_bytes,omitempty"` } +func (s *Transport) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "inbound_handling_time_histogram": + if err := dec.Decode(&s.InboundHandlingTimeHistogram); err != nil { + return err + } + + case "outbound_handling_time_histogram": + if err := dec.Decode(&s.OutboundHandlingTimeHistogram); err != nil { + return err + } + + case "rx_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RxCount = &value + case float64: + f := int64(v) + s.RxCount = &f + } + + case "rx_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RxSize = &o + + case "rx_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.RxSizeInBytes = &value + case float64: + f := int64(v) + s.RxSizeInBytes = &f + } + + case "server_open": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ServerOpen = &value + case float64: + f := int(v) + s.ServerOpen = &f + } + + case "total_outbound_connections": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TotalOutboundConnections = &value + case float64: + f := int64(v) + s.TotalOutboundConnections = &f + } + + case "tx_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TxCount = &value + case float64: + f := int64(v) + s.TxCount = &f + } + + case "tx_size": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TxSize = &o + + case "tx_size_in_bytes": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.TxSizeInBytes = &value + case float64: + f := int64(v) + s.TxSizeInBytes = &f + } + + } + } + return nil +} + // NewTransport returns a Transport. func NewTransport() *Transport { r := &Transport{} diff --git a/typedapi/types/transporthistogram.go b/typedapi/types/transporthistogram.go old mode 100755 new mode 100644 index 2f204c7770..5ae70ca4c1 --- a/typedapi/types/transporthistogram.go +++ b/typedapi/types/transporthistogram.go @@ -16,19 +16,94 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TransportHistogram type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/nodes/_types/Stats.ts#L433-L437 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/nodes/_types/Stats.ts#L433-L437 type TransportHistogram struct { Count *int64 `json:"count,omitempty"` GeMillis *int64 `json:"ge_millis,omitempty"` LtMillis *int64 `json:"lt_millis,omitempty"` } +func (s *TransportHistogram) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int64(v) + s.Count = &f + } + + case "ge_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.GeMillis = &value + case float64: + f := int64(v) + s.GeMillis = &f + } + + case "lt_millis": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LtMillis = &value + case float64: + f := int64(v) + s.LtMillis = &f + } + + } + } + return nil +} + // NewTransportHistogram returns a TransportHistogram. func NewTransportHistogram() *TransportHistogram { r := &TransportHistogram{} diff --git a/typedapi/types/triggercontainer.go b/typedapi/types/triggercontainer.go old mode 100755 new mode 100644 index 1f81444a65..bac7842516 --- a/typedapi/types/triggercontainer.go +++ b/typedapi/types/triggercontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TriggerContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Trigger.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Trigger.ts#L23-L28 type TriggerContainer struct { Schedule *ScheduleContainer `json:"schedule,omitempty"` } diff --git a/typedapi/types/triggereventcontainer.go b/typedapi/types/triggereventcontainer.go old mode 100755 new mode 100644 index 7916c50831..76fb0724b5 --- a/typedapi/types/triggereventcontainer.go +++ b/typedapi/types/triggereventcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // TriggerEventContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Trigger.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Trigger.ts#L32-L37 type TriggerEventContainer struct { Schedule *ScheduleTriggerEvent `json:"schedule,omitempty"` } diff --git a/typedapi/types/triggereventresult.go b/typedapi/types/triggereventresult.go old mode 100755 new mode 100644 index b115c06b67..de914535ac --- a/typedapi/types/triggereventresult.go +++ b/typedapi/types/triggereventresult.go @@ -16,19 +16,65 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TriggerEventResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Trigger.ts#L39-L43 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Trigger.ts#L39-L43 type TriggerEventResult struct { Manual TriggerEventContainer `json:"manual"` TriggeredTime DateTime `json:"triggered_time"` Type string `json:"type"` } +func (s *TriggerEventResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "manual": + if err := dec.Decode(&s.Manual); err != nil { + return err + } + + case "triggered_time": + if err := dec.Decode(&s.TriggeredTime); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewTriggerEventResult returns a TriggerEventResult. func NewTriggerEventResult() *TriggerEventResult { r := &TriggerEventResult{} diff --git a/typedapi/types/trimprocessor.go b/typedapi/types/trimprocessor.go old mode 100755 new mode 100644 index 821c54c844..b5103ba5f0 --- a/typedapi/types/trimprocessor.go +++ b/typedapi/types/trimprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TrimProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L362-L366 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L362-L366 type TrimProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type TrimProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *TrimProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewTrimProcessor returns a TrimProcessor. func NewTrimProcessor() *TrimProcessor { r := &TrimProcessor{} diff --git a/typedapi/types/trimtokenfilter.go b/typedapi/types/trimtokenfilter.go old mode 100755 new mode 100644 index 2cc6c1bfbc..2645bfb241 --- a/typedapi/types/trimtokenfilter.go +++ b/typedapi/types/trimtokenfilter.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TrimTokenFilter type. 
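Boolean fields (enabled in TransientMetadataConfig, ignore_failure and ignore_missing in TrimProcessor) get the same treatment as the numbers: a string "true"/"false" is run through strconv.ParseBool, while a real JSON boolean is assigned directly. A compact sketch with a hypothetical laxBool wrapper:

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// laxBool is a hypothetical helper that accepts true, false, "true" or "false".
type laxBool bool

func (b *laxBool) UnmarshalJSON(data []byte) error {
	var tmp interface{}
	if err := json.Unmarshal(data, &tmp); err != nil {
		return err
	}
	switch v := tmp.(type) {
	case string:
		parsed, err := strconv.ParseBool(v)
		if err != nil {
			return err
		}
		*b = laxBool(parsed)
	case bool:
		*b = laxBool(v)
	}
	return nil
}

func main() {
	var x, y laxBool
	_ = json.Unmarshal([]byte(`true`), &x)
	_ = json.Unmarshal([]byte(`"true"`), &y)
	fmt.Println(x, y) // true true
}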
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L324-L326 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L326-L328 type TrimTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *TrimTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTrimTokenFilter returns a TrimTokenFilter. func NewTrimTokenFilter() *TrimTokenFilter { r := &TrimTokenFilter{} diff --git a/typedapi/types/truncatetokenfilter.go b/typedapi/types/truncatetokenfilter.go old mode 100755 new mode 100644 index 0f08c0d631..34a9a24e32 --- a/typedapi/types/truncatetokenfilter.go +++ b/typedapi/types/truncatetokenfilter.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TruncateTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L328-L331 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L330-L333 type TruncateTokenFilter struct { Length *int `json:"length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *TruncateTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Length = &value + case float64: + f := int(v) + s.Length = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewTruncateTokenFilter returns a TruncateTokenFilter. func NewTruncateTokenFilter() *TruncateTokenFilter { r := &TruncateTokenFilter{} diff --git a/typedapi/types/ttestaggregate.go b/typedapi/types/ttestaggregate.go old mode 100755 new mode 100644 index e08449703f..2150ef6099 --- a/typedapi/types/ttestaggregate.go +++ b/typedapi/types/ttestaggregate.go @@ -16,21 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // TTestAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L726-L730 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L735-L739 type TTestAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Value Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` +} + +func (s *TTestAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil } // NewTTestAggregate returns a TTestAggregate. diff --git a/typedapi/types/ttestaggregation.go b/typedapi/types/ttestaggregation.go old mode 100755 new mode 100644 index fce3b64219..4b56a051a8 --- a/typedapi/types/ttestaggregation.go +++ b/typedapi/types/ttestaggregation.go @@ -16,25 +16,77 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/ttesttype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // TTestAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L153-L157 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L153-L157 type TTestAggregation struct { - A *TestPopulation `json:"a,omitempty"` - B *TestPopulation `json:"b,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Type *ttesttype.TTestType `json:"type,omitempty"` + A *TestPopulation `json:"a,omitempty"` + B *TestPopulation `json:"b,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Type *ttesttype.TTestType `json:"type,omitempty"` +} + +func (s *TTestAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "a": + if err := dec.Decode(&s.A); err != nil { + return err + } + + case "b": + if err := dec.Decode(&s.B); err != nil { + return err + } + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + } + } + return nil } // NewTTestAggregation returns a TTestAggregation. diff --git a/typedapi/types/typefieldmappings.go b/typedapi/types/typefieldmappings.go old mode 100755 new mode 100644 index 471ad025ba..bfea588706 --- a/typedapi/types/typefieldmappings.go +++ b/typedapi/types/typefieldmappings.go @@ -16,17 +16,53 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // TypeFieldMappings type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/get_field_mapping/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/get_field_mapping/types.ts#L24-L26 type TypeFieldMappings struct { Mappings map[string]FieldMapping `json:"mappings"` } +func (s *TypeFieldMappings) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "mappings": + if s.Mappings == nil { + s.Mappings = make(map[string]FieldMapping, 0) + } + if err := dec.Decode(&s.Mappings); err != nil { + return err + } + + } + } + return nil +} + // NewTypeFieldMappings returns a TypeFieldMappings. func NewTypeFieldMappings() *TypeFieldMappings { r := &TypeFieldMappings{ diff --git a/typedapi/types/typemapping.go b/typedapi/types/typemapping.go old mode 100755 new mode 100644 index 4a0eb7f307..c4e1c785b6 --- a/typedapi/types/typemapping.go +++ b/typedapi/types/typemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // TypeMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/TypeMapping.ts#L34-L55 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/TypeMapping.ts#L34-L53 type TypeMapping struct { AllField *AllField `json:"all_field,omitempty"` DataStreamTimestamp_ *DataStreamTimestamp `json:"_data_stream_timestamp,omitempty"` @@ -43,7 +45,7 @@ type TypeMapping struct { Enabled *bool `json:"enabled,omitempty"` FieldNames_ *FieldNamesField `json:"_field_names,omitempty"` IndexField *IndexField `json:"index_field,omitempty"` - Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` + Meta_ Metadata `json:"_meta,omitempty"` NumericDetection *bool `json:"numeric_detection,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Routing_ *RoutingField `json:"_routing,omitempty"` @@ -53,6 +55,7 @@ type TypeMapping struct { } func (s *TypeMapping) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,8 +80,17 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "date_detection": - if err := dec.Decode(&s.DateDetection); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DateDetection = &value + case bool: + s.DateDetection = &v } case "dynamic": @@ -97,8 +109,17 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "enabled": - if err := dec.Decode(&s.Enabled); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v } case "_field_names": @@ -117,11 +138,23 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "numeric_detection": - if err := dec.Decode(&s.NumericDetection); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.NumericDetection = &value + case bool: + s.NumericDetection = &v } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -409,7 +442,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -421,6 +454,9 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "runtime": + if s.Runtime == nil { + s.Runtime = make(map[string]RuntimeField, 0) + } if err := dec.Decode(&s.Runtime); err != nil { return err } diff --git a/typedapi/types/typequery.go b/typedapi/types/typequery.go old mode 100755 new mode 100644 index 35dc58f0e1..62dea90dd1 --- a/typedapi/types/typequery.go +++ b/typedapi/types/typequery.go @@ -16,19 +16,81 @@ // under the License. 
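The TypeMapping decoder also gains guards such as `if s.Properties == nil { s.Properties = make(map[string]Property, 0) }` before the per-key decoding. The reason is a Go detail rather than anything Elasticsearch-specific: reading from a nil map is allowed, but assigning into one panics, and the properties branch writes entries key by key. A small illustration:

package main

import "fmt"

func main() {
	var m map[string]int

	// Reads from a nil map are fine and return the zero value.
	fmt.Println(m["a"]) // 0

	// m["a"] = 1 // would panic: assignment to entry in nil map

	// Allocating first, as the generated guards do, makes writes safe.
	if m == nil {
		m = make(map[string]int)
	}
	m["a"] = 1
	fmt.Println(m["a"]) // 1
}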
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // TypeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L145-L147 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L145-L147 type TypeQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Value string `json:"value"` } +func (s *TypeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = o + + } + } + return nil +} + // NewTypeQuery returns a TypeQuery. func NewTypeQuery() *TypeQuery { r := &TypeQuery{} diff --git a/typedapi/types/uaxemailurltokenizer.go b/typedapi/types/uaxemailurltokenizer.go old mode 100755 new mode 100644 index 3c985a0934..0bf6c8e75f --- a/typedapi/types/uaxemailurltokenizer.go +++ b/typedapi/types/uaxemailurltokenizer.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // UaxEmailUrlTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L109-L112 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L109-L112 type UaxEmailUrlTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *UaxEmailUrlTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_token_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTokenLength = &value + case float64: + f := int(v) + s.MaxTokenLength = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewUaxEmailUrlTokenizer returns a UaxEmailUrlTokenizer. func NewUaxEmailUrlTokenizer() *UaxEmailUrlTokenizer { r := &UaxEmailUrlTokenizer{} diff --git a/typedapi/types/unassignedinformation.go b/typedapi/types/unassignedinformation.go old mode 100755 new mode 100644 index cf5182e168..f80326993d --- a/typedapi/types/unassignedinformation.go +++ b/typedapi/types/unassignedinformation.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/unassignedinformationreason" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // UnassignedInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/cluster/allocation_explain/types.ts#L117-L125 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/cluster/allocation_explain/types.ts#L117-L125 type UnassignedInformation struct { AllocationStatus *string `json:"allocation_status,omitempty"` At DateTime `json:"at"` @@ -37,6 +45,90 @@ type UnassignedInformation struct { Reason unassignedinformationreason.UnassignedInformationReason `json:"reason"` } +func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allocation_status": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.AllocationStatus = &o + + case "at": + if err := dec.Decode(&s.At); err != nil { + return err + } + + case "delayed": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Delayed = &value + case bool: + s.Delayed = &v + } + + case "details": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Details = &o + + case "failed_allocation_attempts": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.FailedAllocationAttempts = &value + case float64: + f := int(v) + s.FailedAllocationAttempts = &f + } + + case "last_allocation_status": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.LastAllocationStatus = &o + + case "reason": + if err := dec.Decode(&s.Reason); err != nil { + return err + } + + } + } + return nil +} + // NewUnassignedInformation returns a UnassignedInformation. func NewUnassignedInformation() *UnassignedInformation { r := &UnassignedInformation{} diff --git a/typedapi/types/uniquetokenfilter.go b/typedapi/types/uniquetokenfilter.go old mode 100755 new mode 100644 index 8a3c781895..ac4fadd8e4 --- a/typedapi/types/uniquetokenfilter.go +++ b/typedapi/types/uniquetokenfilter.go @@ -16,19 +16,73 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // UniqueTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L333-L336 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L335-L338 type UniqueTokenFilter struct { OnlyOnSamePosition *bool `json:"only_on_same_position,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *UniqueTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "only_on_same_position": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.OnlyOnSamePosition = &value + case bool: + s.OnlyOnSamePosition = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewUniqueTokenFilter returns a UniqueTokenFilter. func NewUniqueTokenFilter() *UniqueTokenFilter { r := &UniqueTokenFilter{} diff --git a/typedapi/types/unmappedraretermsaggregate.go b/typedapi/types/unmappedraretermsaggregate.go old mode 100755 new mode 100644 index db4e12b197..846097ec57 --- a/typedapi/types/unmappedraretermsaggregate.go +++ b/typedapi/types/unmappedraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // UnmappedRareTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L452-L458 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L453-L459 type UnmappedRareTermsAggregate struct { - Buckets BucketsVoid `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Buckets BucketsVoid `json:"buckets"` + Meta Metadata `json:"meta,omitempty"` } func (s *UnmappedRareTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *UnmappedRareTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': - o := make(map[string]struct{}, 0) - localDec.Decode(&o) + o := make(map[string]interface{}, 0) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': - o := []struct{}{} - localDec.Decode(&o) + o := []interface{}{} + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/unmappedsampleraggregate.go b/typedapi/types/unmappedsampleraggregate.go old mode 100755 new mode 100644 index bf87df6ecf..7af1c1d09a --- a/typedapi/types/unmappedsampleraggregate.go +++ b/typedapi/types/unmappedsampleraggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,19 +29,22 @@ import ( "strings" + "strconv" + "encoding/json" ) // UnmappedSamplerAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L500-L501 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L501-L502 type UnmappedSamplerAggregate struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + Meta Metadata `json:"meta,omitempty"` } func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -55,451 +58,19 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - 
} - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if 
err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := 
dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "meta": @@ -507,6 +78,519 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { return err } + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { 
+ return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return 
err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := 
NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } + } } return nil @@ -531,6 +615,7 @@ func (s UnmappedSamplerAggregate) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/unmappedsignificanttermsaggregate.go b/typedapi/types/unmappedsignificanttermsaggregate.go old mode 100755 new mode 100644 index b2b2c4255e..0ce6c53aca --- a/typedapi/types/unmappedsignificanttermsaggregate.go +++ b/typedapi/types/unmappedsignificanttermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // UnmappedSignificantTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L609-L615 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L610-L616 type UnmappedSignificantTermsAggregate struct { - BgCount *int64 `json:"bg_count,omitempty"` - Buckets BucketsVoid `json:"buckets"` - DocCount *int64 `json:"doc_count,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + BgCount *int64 `json:"bg_count,omitempty"` + Buckets BucketsVoid `json:"buckets"` + DocCount *int64 `json:"doc_count,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -53,8 +56,18 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - if err := dec.Decode(&s.BgCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.BgCount = &value + case float64: + f := int64(v) + s.BgCount = &f } case "buckets": @@ -64,21 +77,33 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': - o := make(map[string]struct{}, 0) - localDec.Decode(&o) + o := make(map[string]interface{}, 0) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': - o := []struct{}{} - localDec.Decode(&o) + o := []interface{}{} + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCount = &value + case float64: + f := int64(v) + s.DocCount = &f } case "meta": diff --git a/typedapi/types/unmappedtermsaggregate.go b/typedapi/types/unmappedtermsaggregate.go old mode 100755 new mode 100644 index 00bbc54a50..90da4ab810 --- a/typedapi/types/unmappedtermsaggregate.go +++ b/typedapi/types/unmappedtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,20 +25,23 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // UnmappedTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L422-L428 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L423-L429 type UnmappedTermsAggregate struct { - Buckets BucketsVoid `json:"buckets"` - DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` + Buckets BucketsVoid `json:"buckets"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Meta Metadata `json:"meta,omitempty"` + SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` } func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -59,21 +62,33 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': - o := make(map[string]struct{}, 0) - localDec.Decode(&o) + o := make(map[string]interface{}, 0) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': - o := []struct{}{} - localDec.Decode(&o) + o := []interface{}{} + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } case "doc_count_error_upper_bound": - if err := dec.Decode(&s.DocCountErrorUpperBound); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.DocCountErrorUpperBound = &value + case float64: + f := int64(v) + s.DocCountErrorUpperBound = &f } case "meta": @@ -82,8 +97,18 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - if err := dec.Decode(&s.SumOtherDocCount); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.SumOtherDocCount = &value + case float64: + f := int64(v) + s.SumOtherDocCount = &f } } diff --git a/typedapi/types/unrateddocument.go b/typedapi/types/unrateddocument.go old mode 100755 new mode 100644 index 99ebdbe651..9979e9b111 --- a/typedapi/types/unrateddocument.go +++ b/typedapi/types/unrateddocument.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // UnratedDocument type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/rank_eval/types.ts#L147-L150 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/rank_eval/types.ts#L147-L150 type UnratedDocument struct { Id_ string `json:"_id"` Index_ string `json:"_index"` } +func (s *UnratedDocument) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_id": + if err := dec.Decode(&s.Id_); err != nil { + return err + } + + case "_index": + if err := dec.Decode(&s.Index_); err != nil { + return err + } + + } + } + return nil +} + // NewUnratedDocument returns a UnratedDocument. func NewUnratedDocument() *UnratedDocument { r := &UnratedDocument{} diff --git a/typedapi/types/unsignedlongnumberproperty.go b/typedapi/types/unsignedlongnumberproperty.go old mode 100755 new mode 100644 index 714c8c3531..4acbdef10b --- a/typedapi/types/unsignedlongnumberproperty.go +++ b/typedapi/types/unsignedlongnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // UnsignedLongNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L166-L169 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L166-L169 type UnsignedLongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -63,6 +65,7 @@ type UnsignedLongNumberProperty struct { } func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -77,23 +80,63 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - if err := dec.Decode(&s.Boost); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = &f + case float64: + f := Float64(v) + s.Boost = &f } case "coerce": - if err := dec.Decode(&s.Coerce); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Coerce = &value + case bool: + s.Coerce = &v } case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v 
:= tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -102,6 +145,9 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -389,28 +435,60 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "ignore_malformed": - if err := dec.Decode(&s.IgnoreMalformed); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMalformed = &value + case bool: + s.IgnoreMalformed = &v } case "index": - if err := dec.Decode(&s.Index); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Index = &value + case bool: + s.Index = &v } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } @@ -426,6 +504,9 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -713,7 +794,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } @@ -725,18 +806,39 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "time_series_dimension": - if err := dec.Decode(&s.TimeSeriesDimension); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.TimeSeriesDimension = &value + case bool: + s.TimeSeriesDimension = &v } case "time_series_metric": diff --git a/typedapi/types/updatebyqueryrethrottlenode.go b/typedapi/types/updatebyqueryrethrottlenode.go old mode 100755 new mode 100644 index 5c1bd81f54..bf66251690 --- a/typedapi/types/updatebyqueryrethrottlenode.go +++ b/typedapi/types/updatebyqueryrethrottlenode.go @@ -16,32 +16,99 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/noderole" + + "bytes" + "errors" + "io" + + "encoding/json" ) // UpdateByQueryRethrottleNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleNode.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleNode.ts#L25-L27 type UpdateByQueryRethrottleNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` Ip string `json:"ip"` Name string `json:"name"` Roles []noderole.NodeRole `json:"roles,omitempty"` - Tasks map[TaskId]TaskInfo `json:"tasks"` + Tasks map[string]TaskInfo `json:"tasks"` TransportAddress string `json:"transport_address"` } +func (s *UpdateByQueryRethrottleNode) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "attributes": + if s.Attributes == nil { + s.Attributes = make(map[string]string, 0) + } + if err := dec.Decode(&s.Attributes); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "ip": + if err := dec.Decode(&s.Ip); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "tasks": + if s.Tasks == nil { + s.Tasks = make(map[string]TaskInfo, 0) + } + if err := dec.Decode(&s.Tasks); err != nil { + return err + } + + case "transport_address": + if err := dec.Decode(&s.TransportAddress); err != nil { + return err + } + + } + } + return nil +} + // NewUpdateByQueryRethrottleNode returns a UpdateByQueryRethrottleNode. func NewUpdateByQueryRethrottleNode() *UpdateByQueryRethrottleNode { r := &UpdateByQueryRethrottleNode{ Attributes: make(map[string]string, 0), - Tasks: make(map[TaskId]TaskInfo, 0), + Tasks: make(map[string]TaskInfo, 0), } return r diff --git a/typedapi/types/uppercaseprocessor.go b/typedapi/types/uppercaseprocessor.go old mode 100755 new mode 100644 index 84a8dd9569..9ae964880a --- a/typedapi/types/uppercaseprocessor.go +++ b/typedapi/types/uppercaseprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // UppercaseProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L368-L372 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L368-L372 type UppercaseProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type UppercaseProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewUppercaseProcessor returns a UppercaseProcessor. func NewUppercaseProcessor() *UppercaseProcessor { r := &UppercaseProcessor{} diff --git a/typedapi/types/uppercasetokenfilter.go b/typedapi/types/uppercasetokenfilter.go old mode 100755 new mode 100644 index 29925e0bb2..cba0034563 --- a/typedapi/types/uppercasetokenfilter.go +++ b/typedapi/types/uppercasetokenfilter.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // UppercaseTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L338-L340 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L340-L342 type UppercaseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *UppercaseTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewUppercaseTokenFilter returns a UppercaseTokenFilter. func NewUppercaseTokenFilter() *UppercaseTokenFilter { r := &UppercaseTokenFilter{} diff --git a/typedapi/types/urldecodeprocessor.go b/typedapi/types/urldecodeprocessor.go old mode 100755 new mode 100644 index 47e4dfb7cb..c12f78bcb7 --- a/typedapi/types/urldecodeprocessor.go +++ b/typedapi/types/urldecodeprocessor.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // UrlDecodeProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L374-L378 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L374-L378 type UrlDecodeProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -34,6 +44,93 @@ type UrlDecodeProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + 
s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewUrlDecodeProcessor returns a UrlDecodeProcessor. func NewUrlDecodeProcessor() *UrlDecodeProcessor { r := &UrlDecodeProcessor{} diff --git a/typedapi/types/usagestatsindex.go b/typedapi/types/usagestatsindex.go old mode 100755 new mode 100644 index a6c15e1cc2..f10dd8022f --- a/typedapi/types/usagestatsindex.go +++ b/typedapi/types/usagestatsindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // UsageStatsIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L38-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L38-L40 type UsageStatsIndex struct { Shards []UsageStatsShards `json:"shards"` } diff --git a/typedapi/types/usagestatsshards.go b/typedapi/types/usagestatsshards.go old mode 100755 new mode 100644 index cd99d4af7d..4dab97df96 --- a/typedapi/types/usagestatsshards.go +++ b/typedapi/types/usagestatsshards.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // UsageStatsShards type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L42-L47 type UsageStatsShards struct { Routing ShardRouting `json:"routing"` Stats IndicesShardsStats `json:"stats"` @@ -30,6 +38,49 @@ type UsageStatsShards struct { TrackingStartedAtMillis int64 `json:"tracking_started_at_millis"` } +func (s *UsageStatsShards) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "routing": + if err := dec.Decode(&s.Routing); err != nil { + return err + } + + case "stats": + if err := dec.Decode(&s.Stats); err != nil { + return err + } + + case "tracking_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TrackingId = o + + case "tracking_started_at_millis": + if err := dec.Decode(&s.TrackingStartedAtMillis); err != nil { + return err + } + + } + } + return nil +} + // NewUsageStatsShards returns a UsageStatsShards. 
func NewUsageStatsShards() *UsageStatsShards { r := &UsageStatsShards{} diff --git a/typedapi/types/user.go b/typedapi/types/user.go old mode 100755 new mode 100644 index c5f498c59c..70892eb51a --- a/typedapi/types/user.go +++ b/typedapi/types/user.go @@ -16,25 +16,98 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // User type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/User.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/User.ts#L23-L31 type User struct { - Email string `json:"email,omitempty"` - Enabled bool `json:"enabled"` - FullName string `json:"full_name,omitempty"` - Metadata map[string]json.RawMessage `json:"metadata"` - ProfileUid *string `json:"profile_uid,omitempty"` - Roles []string `json:"roles"` - Username string `json:"username"` + Email string `json:"email,omitempty"` + Enabled bool `json:"enabled"` + FullName string `json:"full_name,omitempty"` + Metadata Metadata `json:"metadata"` + ProfileUid *string `json:"profile_uid,omitempty"` + Roles []string `json:"roles"` + Username string `json:"username"` +} + +func (s *User) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "email": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Email = o + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "full_name": + if err := dec.Decode(&s.FullName); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "profile_uid": + if err := dec.Decode(&s.ProfileUid); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil } // NewUser returns a User. diff --git a/typedapi/types/useragentprocessor.go b/typedapi/types/useragentprocessor.go old mode 100755 new mode 100644 index 9d2c5ab3ee..39e9106cba --- a/typedapi/types/useragentprocessor.go +++ b/typedapi/types/useragentprocessor.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/useragentproperty" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // UserAgentProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ingest/_types/Processors.ts#L115-L121 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ingest/_types/Processors.ts#L115-L121 type UserAgentProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` @@ -40,6 +48,106 @@ type UserAgentProcessor struct { TargetField *string `json:"target_field,omitempty"` } +func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "if": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.If = &o + + case "ignore_failure": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreFailure = &value + case bool: + s.IgnoreFailure = &v + } + + case "ignore_missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreMissing = &value + case bool: + s.IgnoreMissing = &v + } + + case "on_failure": + if err := dec.Decode(&s.OnFailure); err != nil { + return err + } + + case "options": + if err := dec.Decode(&s.Options); err != nil { + return err + } + + case "regex_file": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.RegexFile = &o + + case "tag": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Tag = &o + + case "target_field": + if err := dec.Decode(&s.TargetField); err != nil { + return err + } + + } + } + return nil +} + // NewUserAgentProcessor returns a UserAgentProcessor. func NewUserAgentProcessor() *UserAgentProcessor { r := &UserAgentProcessor{} diff --git a/typedapi/types/userindicesprivileges.go b/typedapi/types/userindicesprivileges.go old mode 100755 new mode 100644 index f6ee01da7c..42335232bf --- a/typedapi/types/userindicesprivileges.go +++ b/typedapi/types/userindicesprivileges.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexprivilege" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // UserIndicesPrivileges type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/Privileges.ts#L106-L128 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/Privileges.ts#L107-L129 type UserIndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. Implicitly, restricted indices have limited @@ -49,6 +57,71 @@ type UserIndicesPrivileges struct { Query []IndicesPrivilegesQuery `json:"query,omitempty"` } +func (s *UserIndicesPrivileges) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_restricted_indices": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AllowRestrictedIndices = value + case bool: + s.AllowRestrictedIndices = v + } + + case "field_security": + if err := dec.Decode(&s.FieldSecurity); err != nil { + return err + } + + case "names": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.Names = append(s.Names, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Names); err != nil { + return err + } + } + + case "privileges": + if err := dec.Decode(&s.Privileges); err != nil { + return err + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return err + } + + } + } + return nil +} + // NewUserIndicesPrivileges returns a UserIndicesPrivileges. func NewUserIndicesPrivileges() *UserIndicesPrivileges { r := &UserIndicesPrivileges{} diff --git a/typedapi/types/userprofile.go b/typedapi/types/userprofile.go old mode 100755 new mode 100644 index d8fca9efae..e6dbb5a52f --- a/typedapi/types/userprofile.go +++ b/typedapi/types/userprofile.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // UserProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/UserProfile.ts#L42-L48 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/UserProfile.ts#L42-L48 type UserProfile struct { Data map[string]json.RawMessage `json:"data"` Enabled *bool `json:"enabled,omitempty"` @@ -35,6 +41,66 @@ type UserProfile struct { User UserProfileUser `json:"user"` } +func (s *UserProfile) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "data": + if s.Data == nil { + s.Data = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Data); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v + } + + case "labels": + if s.Labels == nil { + s.Labels = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Labels); err != nil { + return err + } + + case "uid": + if err := dec.Decode(&s.Uid); err != nil { + return err + } + + case "user": + if err := dec.Decode(&s.User); err != nil { + return err + } + + } + } + return nil +} + // NewUserProfile returns a UserProfile. func NewUserProfile() *UserProfile { r := &UserProfile{ diff --git a/typedapi/types/userprofilehitmetadata.go b/typedapi/types/userprofilehitmetadata.go old mode 100755 new mode 100644 index fe1b38679c..4547634494 --- a/typedapi/types/userprofilehitmetadata.go +++ b/typedapi/types/userprofilehitmetadata.go @@ -16,18 +16,68 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // UserProfileHitMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/UserProfile.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/UserProfile.ts#L28-L31 type UserProfileHitMetadata struct { PrimaryTerm_ int64 `json:"_primary_term"` SeqNo_ int64 `json:"_seq_no"` } +func (s *UserProfileHitMetadata) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "_primary_term": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.PrimaryTerm_ = value + case float64: + f := int64(v) + s.PrimaryTerm_ = f + } + + case "_seq_no": + if err := dec.Decode(&s.SeqNo_); err != nil { + return err + } + + } + } + return nil +} + // NewUserProfileHitMetadata returns a UserProfileHitMetadata. 
func NewUserProfileHitMetadata() *UserProfileHitMetadata { r := &UserProfileHitMetadata{} diff --git a/typedapi/types/userprofileuser.go b/typedapi/types/userprofileuser.go old mode 100755 new mode 100644 index 5f621c7726..7aa0eeb6f8 --- a/typedapi/types/userprofileuser.go +++ b/typedapi/types/userprofileuser.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // UserProfileUser type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/UserProfile.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/UserProfile.ts#L33-L40 type UserProfileUser struct { Email string `json:"email,omitempty"` FullName string `json:"full_name,omitempty"` @@ -32,6 +40,59 @@ type UserProfileUser struct { Username string `json:"username"` } +func (s *UserProfileUser) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "email": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Email = o + + case "full_name": + if err := dec.Decode(&s.FullName); err != nil { + return err + } + + case "realm_domain": + if err := dec.Decode(&s.RealmDomain); err != nil { + return err + } + + case "realm_name": + if err := dec.Decode(&s.RealmName); err != nil { + return err + } + + case "roles": + if err := dec.Decode(&s.Roles); err != nil { + return err + } + + case "username": + if err := dec.Decode(&s.Username); err != nil { + return err + } + + } + } + return nil +} + // NewUserProfileUser returns a UserProfileUser. func NewUserProfileUser() *UserProfileUser { r := &UserProfileUser{} diff --git a/typedapi/types/userprofilewithmetadata.go b/typedapi/types/userprofilewithmetadata.go old mode 100755 new mode 100644 index 96c51306e9..5ec4894f97 --- a/typedapi/types/userprofilewithmetadata.go +++ b/typedapi/types/userprofilewithmetadata.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + + "strconv" + "encoding/json" ) // UserProfileWithMetadata type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/_types/UserProfile.ts#L50-L53 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/_types/UserProfile.ts#L50-L53 type UserProfileWithMetadata struct { Data map[string]json.RawMessage `json:"data"` Doc_ UserProfileHitMetadata `json:"_doc"` @@ -37,6 +43,86 @@ type UserProfileWithMetadata struct { User UserProfileUser `json:"user"` } +func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "data": + if s.Data == nil { + s.Data = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Data); err != nil { + return err + } + + case "_doc": + if err := dec.Decode(&s.Doc_); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = &value + case bool: + s.Enabled = &v + } + + case "labels": + if s.Labels == nil { + s.Labels = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Labels); err != nil { + return err + } + + case "last_synchronized": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.LastSynchronized = value + case float64: + f := int64(v) + s.LastSynchronized = f + } + + case "uid": + if err := dec.Decode(&s.Uid); err != nil { + return err + } + + case "user": + if err := dec.Decode(&s.User); err != nil { + return err + } + + } + } + return nil +} + // NewUserProfileWithMetadata returns a UserProfileWithMetadata. func NewUserProfileWithMetadata() *UserProfileWithMetadata { r := &UserProfileWithMetadata{ diff --git a/typedapi/types/userrealm.go b/typedapi/types/userrealm.go old mode 100755 new mode 100644 index 64e66a522b..cc5b8aa475 --- a/typedapi/types/userrealm.go +++ b/typedapi/types/userrealm.go @@ -16,18 +16,59 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // UserRealm type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/security/get_token/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/security/get_token/types.ts#L30-L33 type UserRealm struct { Name string `json:"name"` Type string `json:"type"` } +func (s *UserRealm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Type = o + + } + } + return nil +} + // NewUserRealm returns a UserRealm. 
func NewUserRealm() *UserRealm { r := &UserRealm{} diff --git a/typedapi/types/validationloss.go b/typedapi/types/validationloss.go old mode 100755 new mode 100644 index 9689832a52..e18194a121 --- a/typedapi/types/validationloss.go +++ b/typedapi/types/validationloss.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // ValidationLoss type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/DataframeAnalytics.ts#L428-L433 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/DataframeAnalytics.ts#L428-L433 type ValidationLoss struct { // FoldValues Validation loss values for every added decision tree during the forest // growing procedure. diff --git a/typedapi/types/valuecountaggregate.go b/typedapi/types/valuecountaggregate.go old mode 100755 new mode 100644 index 1014274cc1..59e20b6a6b --- a/typedapi/types/valuecountaggregate.go +++ b/typedapi/types/valuecountaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // ValueCountAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L217-L221 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L218-L222 type ValueCountAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. @@ -36,6 +40,44 @@ type ValueCountAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *ValueCountAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewValueCountAggregate returns a ValueCountAggregate. func NewValueCountAggregate() *ValueCountAggregate { r := &ValueCountAggregate{} diff --git a/typedapi/types/valuecountaggregation.go b/typedapi/types/valuecountaggregation.go old mode 100755 new mode 100644 index e2d04f5b3e..532271e472 --- a/typedapi/types/valuecountaggregation.go +++ b/typedapi/types/valuecountaggregation.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // ValueCountAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L196-L196 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L196-L196 type ValueCountAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` @@ -30,6 +38,49 @@ type ValueCountAggregation struct { Script Script `json:"script,omitempty"` } +func (s *ValueCountAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "missing": + if err := dec.Decode(&s.Missing); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewValueCountAggregation returns a ValueCountAggregation. func NewValueCountAggregation() *ValueCountAggregation { r := &ValueCountAggregation{} diff --git a/typedapi/types/variablewidthhistogramaggregate.go b/typedapi/types/variablewidthhistogramaggregate.go old mode 100755 new mode 100644 index 3a018d6702..0a6df84418 --- a/typedapi/types/variablewidthhistogramaggregate.go +++ b/typedapi/types/variablewidthhistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -30,13 +30,14 @@ import ( // VariableWidthHistogramAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L361-L363 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L362-L364 type VariableWidthHistogramAggregate struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` } func (s *VariableWidthHistogramAggregate) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -57,15 +58,17 @@ func (s *VariableWidthHistogramAggregate) UnmarshalJSON(data []byte) error { source := bytes.NewReader(rawMsg) localDec := json.NewDecoder(source) switch rawMsg[0] { - case '{': o := make(map[string]VariableWidthHistogramBucket, 0) - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o - case '[': o := []VariableWidthHistogramBucket{} - localDec.Decode(&o) + if err := localDec.Decode(&o); err != nil { + return err + } s.Buckets = o } diff --git a/typedapi/types/variablewidthhistogramaggregation.go b/typedapi/types/variablewidthhistogramaggregation.go old mode 100755 new mode 100644 index 04ca2e98aa..96101b68cb --- a/typedapi/types/variablewidthhistogramaggregation.go +++ b/typedapi/types/variablewidthhistogramaggregation.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // VariableWidthHistogramAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/bucket.ts#L430-L435 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/bucket.ts#L430-L435 type VariableWidthHistogramAggregation struct { Buckets *int `json:"buckets,omitempty"` Field *string `json:"field,omitempty"` @@ -30,6 +40,79 @@ type VariableWidthHistogramAggregation struct { ShardSize *int `json:"shard_size,omitempty"` } +func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "buckets": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Buckets = &value + case float64: + f := int(v) + s.Buckets = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "initial_buffer": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.InitialBuffer = &value + case float64: + f := int(v) + s.InitialBuffer = &f + } + + case "shard_size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.ShardSize = &value + case float64: + f := int(v) + s.ShardSize = &f + } + + } + } + return nil +} + // NewVariableWidthHistogramAggregation returns a VariableWidthHistogramAggregation. func NewVariableWidthHistogramAggregation() *VariableWidthHistogramAggregation { r := &VariableWidthHistogramAggregation{} diff --git a/typedapi/types/variablewidthhistogrambucket.go b/typedapi/types/variablewidthhistogrambucket.go old mode 100755 new mode 100644 index 86dfc3128a..7e167ff0d6 --- a/typedapi/types/variablewidthhistogrambucket.go +++ b/typedapi/types/variablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -29,12 +29,14 @@ import ( "strings" + "strconv" + "encoding/json" ) // VariableWidthHistogramBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L365-L372 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L366-L373 type VariableWidthHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -47,6 +49,7 @@ type VariableWidthHistogramBucket struct { } func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -60,482 +63,605 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { switch t { - case "aggregations": - for dec.More() { - tt, err := dec.Token() + case "doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) if err != nil { - if errors.Is(err, io.EOF) { - break - } return err } - if value, ok := tt.(string); ok { - if strings.Contains(value, "#") { - elems := strings.Split(value, "#") - if len(elems) == 2 { - switch elems[0] { - case "cardinality": - o := NewCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentiles": - o := NewHdrPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "hdr_percentile_ranks": - o := NewHdrPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentiles": - o := NewTDigestPercentilesAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "tdigest_percentile_ranks": - o := NewTDigestPercentileRanksAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "percentiles_bucket": - o := NewPercentilesBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "median_absolute_deviation": - o := NewMedianAbsoluteDeviationAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "min": - o := NewMinAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "max": - o := NewMaxAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sum": - o := NewSumAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "avg": - o := NewAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "weighted_avg": - o := NewWeightedAvgAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "value_count": - o := NewValueCountAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_value": - o := NewSimpleValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "derivative": - o := NewDerivativeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "bucket_metric_value": - o := NewBucketMetricValueAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats": - o := 
NewStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "stats_bucket": - o := NewStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats": - o := NewExtendedStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "extended_stats_bucket": - o := NewExtendedStatsBucketAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_bounds": - o := NewGeoBoundsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_centroid": - o := NewGeoCentroidAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "histogram": - o := NewHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_histogram": - o := NewDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "auto_date_histogram": - o := NewAutoDateHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "variable_width_histogram": - o := NewVariableWidthHistogramAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sterms": - o := NewStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lterms": - o := NewLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "dterms": - o := NewDoubleTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umterms": - o := NewUnmappedTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "lrareterms": - o := NewLongRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "srareterms": - o := NewStringRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umrareterms": - o := NewUnmappedRareTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "multi_terms": - o := NewMultiTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "missing": - o := NewMissingAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "nested": - o := NewNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "reverse_nested": - o := NewReverseNestedAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "global": - o := NewGlobalAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filter": - o := NewFilterAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "children": - o := NewChildrenAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "parent": - o := NewParentAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case 
"sampler": - o := NewSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "unmapped_sampler": - o := NewUnmappedSamplerAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohash_grid": - o := NewGeoHashGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geotile_grid": - o := NewGeoTileGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geohex_grid": - o := NewGeoHexGridAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "range": - o := NewRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "date_range": - o := NewDateRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_distance": - o := NewGeoDistanceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_range": - o := NewIpRangeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "ip_prefix": - o := NewIpPrefixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "filters": - o := NewFiltersAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "adjacency_matrix": - o := NewAdjacencyMatrixAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "siglterms": - o := NewSignificantLongTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "sigsterms": - o := NewSignificantStringTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "umsigterms": - o := NewUnmappedSignificantTermsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "composite": - o := NewCompositeAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "scripted_metric": - o := NewScriptedMetricAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_hits": - o := NewTopHitsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "inference": - o := NewInferenceAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "string_stats": - o := NewStringStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "box_plot": - o := NewBoxPlotAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "top_metrics": - o := NewTopMetricsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "t_test": - o := NewTTestAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "rate": - o := NewRateAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "simple_long_value": - o := NewCumulativeCardinalityAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - 
case "matrix_stats": - o := NewMatrixStatsAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - case "geo_line": - o := NewGeoLineAggregate() - if err := dec.Decode(o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - default: - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[elems[1]] = o - } - } else { - return errors.New("cannot decode JSON for field Aggregations") - } - } else { - o := make(map[string]interface{}, 0) - if err := dec.Decode(&o); err != nil { - return err - } - s.Aggregations[value] = o - } - } - } - - case "doc_count": - if err := dec.Decode(&s.DocCount); err != nil { - return err + s.DocCount = value + case float64: + f := int64(v) + s.DocCount = f } case "key": - if err := dec.Decode(&s.Key); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Key = f + case float64: + f := Float64(v) + s.Key = f } case "key_as_string": - if err := dec.Decode(&s.KeyAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.KeyAsString = &o case "max": - if err := dec.Decode(&s.Max); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Max = f + case float64: + f := Float64(v) + s.Max = f } case "max_as_string": - if err := dec.Decode(&s.MaxAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.MaxAsString = &o case "min": - if err := dec.Decode(&s.Min); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Min = f + case float64: + f := Float64(v) + s.Min = f } case "min_as_string": - if err := dec.Decode(&s.MinAsString); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.MinAsString = &o + + default: + + if value, ok := t.(string); ok { + if strings.Contains(value, "#") { + elems := strings.Split(value, "#") + if len(elems) == 2 { + if s.Aggregations == nil { + s.Aggregations = make(map[string]Aggregate, 0) + } + switch elems[0] { + + case "cardinality": + o := NewCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentiles": + o := NewHdrPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "hdr_percentile_ranks": + o := NewHdrPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentiles": + o := NewTDigestPercentilesAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "tdigest_percentile_ranks": + o := NewTDigestPercentileRanksAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "percentiles_bucket": + o := NewPercentilesBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case 
"median_absolute_deviation": + o := NewMedianAbsoluteDeviationAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "min": + o := NewMinAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "max": + o := NewMaxAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sum": + o := NewSumAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "avg": + o := NewAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "weighted_avg": + o := NewWeightedAvgAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "value_count": + o := NewValueCountAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_value": + o := NewSimpleValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "derivative": + o := NewDerivativeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "bucket_metric_value": + o := NewBucketMetricValueAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats": + o := NewStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "stats_bucket": + o := NewStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats": + o := NewExtendedStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "extended_stats_bucket": + o := NewExtendedStatsBucketAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_bounds": + o := NewGeoBoundsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_centroid": + o := NewGeoCentroidAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "histogram": + o := NewHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_histogram": + o := NewDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "auto_date_histogram": + o := NewAutoDateHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "variable_width_histogram": + o := NewVariableWidthHistogramAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sterms": + o := NewStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lterms": + o := NewLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "dterms": + o := NewDoubleTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umterms": + o := NewUnmappedTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "lrareterms": + o := 
NewLongRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "srareterms": + o := NewStringRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "umrareterms": + o := NewUnmappedRareTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "multi_terms": + o := NewMultiTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "missing": + o := NewMissingAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "nested": + o := NewNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "reverse_nested": + o := NewReverseNestedAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "global": + o := NewGlobalAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filter": + o := NewFilterAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "children": + o := NewChildrenAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "parent": + o := NewParentAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sampler": + o := NewSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "unmapped_sampler": + o := NewUnmappedSamplerAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohash_grid": + o := NewGeoHashGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geotile_grid": + o := NewGeoTileGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geohex_grid": + o := NewGeoHexGridAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "range": + o := NewRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "date_range": + o := NewDateRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_distance": + o := NewGeoDistanceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_range": + o := NewIpRangeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "ip_prefix": + o := NewIpPrefixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "filters": + o := NewFiltersAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "adjacency_matrix": + o := NewAdjacencyMatrixAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "siglterms": + o := NewSignificantLongTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "sigsterms": + o := NewSignificantStringTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + 
s.Aggregations[elems[1]] = o + + case "umsigterms": + o := NewUnmappedSignificantTermsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "composite": + o := NewCompositeAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "frequent_item_sets": + o := NewFrequentItemSetsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "scripted_metric": + o := NewScriptedMetricAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_hits": + o := NewTopHitsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "inference": + o := NewInferenceAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "string_stats": + o := NewStringStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "box_plot": + o := NewBoxPlotAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "top_metrics": + o := NewTopMetricsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "t_test": + o := NewTTestAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "rate": + o := NewRateAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "simple_long_value": + o := NewCumulativeCardinalityAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "matrix_stats": + o := NewMatrixStatsAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + case "geo_line": + o := NewGeoLineAggregate() + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + + default: + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[elems[1]] = o + } + } else { + return errors.New("cannot decode JSON for field Aggregations") + } + } else { + o := make(map[string]interface{}, 0) + if err := dec.Decode(&o); err != nil { + return err + } + s.Aggregations[value] = o + } + } } } @@ -561,6 +687,7 @@ func (s VariableWidthHistogramBucket) MarshalJSON() ([]byte, error) { for key, value := range s.Aggregations { tmp[fmt.Sprintf("%s", key)] = value } + delete(tmp, "Aggregations") data, err = json.Marshal(tmp) if err != nil { diff --git a/typedapi/types/vector.go b/typedapi/types/vector.go old mode 100755 new mode 100644 index 14a7e25fd8..433f69c114 --- a/typedapi/types/vector.go +++ b/typedapi/types/vector.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Vector type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L445-L449 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L445-L449 type Vector struct { Available bool `json:"available"` DenseVectorDimsAvgCount int `json:"dense_vector_dims_avg_count"` @@ -31,6 +41,102 @@ type Vector struct { SparseVectorFieldsCount *int `json:"sparse_vector_fields_count,omitempty"` } +func (s *Vector) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "dense_vector_dims_avg_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DenseVectorDimsAvgCount = value + case float64: + f := int(v) + s.DenseVectorDimsAvgCount = f + } + + case "dense_vector_fields_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.DenseVectorFieldsCount = value + case float64: + f := int(v) + s.DenseVectorFieldsCount = f + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "sparse_vector_fields_count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.SparseVectorFieldsCount = &value + case float64: + f := int(v) + s.SparseVectorFieldsCount = &f + } + + } + } + return nil +} + // NewVector returns a Vector. func NewVector() *Vector { r := &Vector{} diff --git a/typedapi/types/verifyindex.go b/typedapi/types/verifyindex.go old mode 100755 new mode 100644 index 2475e7ea99..31d31abb53 --- a/typedapi/types/verifyindex.go +++ b/typedapi/types/verifyindex.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // VerifyIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/indices/recovery/types.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/indices/recovery/types.ts#L111-L116 type VerifyIndex struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` @@ -30,6 +38,46 @@ type VerifyIndex struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *VerifyIndex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "check_index_time": + if err := dec.Decode(&s.CheckIndexTime); err != nil { + return err + } + + case "check_index_time_in_millis": + if err := dec.Decode(&s.CheckIndexTimeInMillis); err != nil { + return err + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewVerifyIndex returns a VerifyIndex. func NewVerifyIndex() *VerifyIndex { r := &VerifyIndex{} diff --git a/typedapi/types/versionproperty.go b/typedapi/types/versionproperty.go old mode 100755 new mode 100644 index 8b18634a51..9a917e6325 --- a/typedapi/types/versionproperty.go +++ b/typedapi/types/versionproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // VersionProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L265-L267 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L265-L267 type VersionProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -48,6 +50,7 @@ type VersionProperty struct { } func (s *VersionProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -62,13 +65,33 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -77,6 +100,9 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -364,23 +390,40 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -668,20 +711,32 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/vertex.go b/typedapi/types/vertex.go old mode 100755 new mode 100644 index 620361dd12..fca6f905e9 --- a/typedapi/types/vertex.go +++ b/typedapi/types/vertex.go @@ -16,13 +16,23 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Vertex type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/Vertex.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/Vertex.ts#L23-L28 type Vertex struct { Depth int64 `json:"depth"` Field string `json:"field"` @@ -30,6 +40,70 @@ type Vertex struct { Weight Float64 `json:"weight"` } +func (s *Vertex) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "depth": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Depth = value + case float64: + f := int64(v) + s.Depth = f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "term": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Term = o + + case "weight": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Weight = f + case float64: + f := Float64(v) + s.Weight = f + } + + } + } + return nil +} + // NewVertex returns a Vertex. func NewVertex() *Vertex { r := &Vertex{} diff --git a/typedapi/types/vertexdefinition.go b/typedapi/types/vertexdefinition.go old mode 100755 new mode 100644 index 59c3902244..4d5018a768 --- a/typedapi/types/vertexdefinition.go +++ b/typedapi/types/vertexdefinition.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // VertexDefinition type. 
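Similarly, the VersionProperty decoder earlier in this patch normalises copy_to from either a single string or an array, and accepts quoted booleans for flags such as doc_values. A hypothetical sketch, using only the types declared in this package:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A bare string for copy_to and a quoted boolean for doc_values are
	// normalised into the []string and *bool fields respectively.
	raw := []byte(`{"type":"version","copy_to":"all_versions","doc_values":"true"}`)

	var p types.VersionProperty
	if err := json.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.CopyTo, *p.DocValues)
	// Expected output: [all_versions] true
}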
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/Vertex.ts#L30-L37 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/Vertex.ts#L30-L37 type VertexDefinition struct { Exclude []string `json:"exclude,omitempty"` Field string `json:"field"` @@ -32,6 +42,87 @@ type VertexDefinition struct { Size *int `json:"size,omitempty"` } +func (s *VertexDefinition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "exclude": + if err := dec.Decode(&s.Exclude); err != nil { + return err + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "include": + if err := dec.Decode(&s.Include); err != nil { + return err + } + + case "min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.MinDocCount = &value + case float64: + f := int64(v) + s.MinDocCount = &f + } + + case "shard_min_doc_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.ShardMinDocCount = &value + case float64: + f := int64(v) + s.ShardMinDocCount = &f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = &value + case float64: + f := int(v) + s.Size = &f + } + + } + } + return nil +} + // NewVertexDefinition returns a VertexDefinition. func NewVertexDefinition() *VertexDefinition { r := &VertexDefinition{} diff --git a/typedapi/types/vertexinclude.go b/typedapi/types/vertexinclude.go old mode 100755 new mode 100644 index 1e1b318394..65f567de56 --- a/typedapi/types/vertexinclude.go +++ b/typedapi/types/vertexinclude.go @@ -16,18 +16,72 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // VertexInclude type. 
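The graph request types get the same treatment: in the VertexDefinition decoder above, optional numeric fields such as size and min_doc_count land in their pointer fields whether they arrive quoted or not. A small sketch under the same assumptions as the previous examples:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// size arrives quoted, min_doc_count as a number; both decode into the
	// optional pointer fields of the graph vertex definition.
	raw := []byte(`{"field":"product.name","size":"5","min_doc_count":2}`)

	var vd types.VertexDefinition
	if err := json.Unmarshal(raw, &vd); err != nil {
		panic(err)
	}
	fmt.Println(vd.Field, *vd.Size, *vd.MinDocCount)
	// Expected output: product.name 5 2
}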
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/graph/_types/Vertex.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/graph/_types/Vertex.ts#L39-L42 type VertexInclude struct { Boost Float64 `json:"boost"` Term string `json:"term"` } +func (s *VertexInclude) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Boost = f + case float64: + f := Float64(v) + s.Boost = f + } + + case "term": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Term = o + + } + } + return nil +} + // NewVertexInclude returns a VertexInclude. func NewVertexInclude() *VertexInclude { r := &VertexInclude{} diff --git a/typedapi/types/vocabulary.go b/typedapi/types/vocabulary.go old mode 100755 new mode 100644 index 5e3557f82c..2927e1e125 --- a/typedapi/types/vocabulary.go +++ b/typedapi/types/vocabulary.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // Vocabulary type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L218-L220 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L218-L220 type Vocabulary struct { Index string `json:"index"` } +func (s *Vocabulary) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + } + } + return nil +} + // NewVocabulary returns a Vocabulary. func NewVocabulary() *Vocabulary { r := &Vocabulary{} diff --git a/typedapi/types/waitforactiveshards.go b/typedapi/types/waitforactiveshards.go old mode 100755 new mode 100644 index e36e6b9785..b94336fead --- a/typedapi/types/waitforactiveshards.go +++ b/typedapi/types/waitforactiveshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -25,5 +25,5 @@ package types // int // waitforactiveshardoptions.WaitForActiveShardOptions // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/common.ts#L122-L123 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/common.ts#L122-L123 type WaitForActiveShards interface{} diff --git a/typedapi/types/warmerstats.go b/typedapi/types/warmerstats.go old mode 100755 new mode 100644 index 734da59ee9..03f0cd0374 --- a/typedapi/types/warmerstats.go +++ b/typedapi/types/warmerstats.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WarmerStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Stats.ts#L252-L257 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Stats.ts#L252-L257 type WarmerStats struct { Current int64 `json:"current"` Total int64 `json:"total"` @@ -30,6 +40,66 @@ type WarmerStats struct { TotalTimeInMillis int64 `json:"total_time_in_millis"` } +func (s *WarmerStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Current = value + case float64: + f := int64(v) + s.Current = f + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + case "total_time": + if err := dec.Decode(&s.TotalTime); err != nil { + return err + } + + case "total_time_in_millis": + if err := dec.Decode(&s.TotalTimeInMillis); err != nil { + return err + } + + } + } + return nil +} + // NewWarmerStats returns a WarmerStats. func NewWarmerStats() *WarmerStats { r := &WarmerStats{} diff --git a/typedapi/types/watch.go b/typedapi/types/watch.go old mode 100755 new mode 100644 index 04ce48bb0c..61d645daf3 --- a/typedapi/types/watch.go +++ b/typedapi/types/watch.go @@ -16,27 +16,99 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // Watch type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Watch.ts#L37-L47 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Watch.ts#L37-L47 type Watch struct { - Actions map[string]WatcherAction `json:"actions"` - Condition WatcherCondition `json:"condition"` - Input WatcherInput `json:"input"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` - Status *WatchStatus `json:"status,omitempty"` - ThrottlePeriod Duration `json:"throttle_period,omitempty"` - ThrottlePeriodInMillis *int64 `json:"throttle_period_in_millis,omitempty"` - Transform *TransformContainer `json:"transform,omitempty"` - Trigger TriggerContainer `json:"trigger"` + Actions map[string]WatcherAction `json:"actions"` + Condition WatcherCondition `json:"condition"` + Input WatcherInput `json:"input"` + Metadata Metadata `json:"metadata,omitempty"` + Status *WatchStatus `json:"status,omitempty"` + ThrottlePeriod Duration `json:"throttle_period,omitempty"` + ThrottlePeriodInMillis *int64 `json:"throttle_period_in_millis,omitempty"` + Transform *TransformContainer `json:"transform,omitempty"` + Trigger TriggerContainer `json:"trigger"` +} + +func (s *Watch) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if s.Actions == nil { + s.Actions = make(map[string]WatcherAction, 0) + } + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "condition": + if err := dec.Decode(&s.Condition); err != nil { + return err + } + + case "input": + if err := dec.Decode(&s.Input); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "throttle_period": + if err := dec.Decode(&s.ThrottlePeriod); err != nil { + return err + } + + case "throttle_period_in_millis": + if err := dec.Decode(&s.ThrottlePeriodInMillis); err != nil { + return err + } + + case "transform": + if err := dec.Decode(&s.Transform); err != nil { + return err + } + + case "trigger": + if err := dec.Decode(&s.Trigger); err != nil { + return err + } + + } + } + return nil } // NewWatch returns a Watch. diff --git a/typedapi/types/watcher.go b/typedapi/types/watcher.go old mode 100755 new mode 100644 index a85e87c48e..2c4bf6b4e6 --- a/typedapi/types/watcher.go +++ b/typedapi/types/watcher.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Watcher type. 
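The Watch struct above now carries its metadata as the shared Metadata alias instead of a bare map[string]json.RawMessage. The sketch below, which assumes Metadata remains a thin alias over raw JSON as it is used elsewhere in this patch, round-trips it through encoding/json:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Only the keys present in the payload are decoded; metadata can be
	// re-marshalled for inspection or forwarded as-is.
	raw := []byte(`{"actions":{},"metadata":{"owner":"ops-team","rev":42}}`)

	var w types.Watch
	if err := json.Unmarshal(raw, &w); err != nil {
		panic(err)
	}

	out, err := json.Marshal(w.Metadata)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// Expected output: {"owner":"ops-team","rev":42}
}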
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L451-L455 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L451-L455 type Watcher struct { Available bool `json:"available"` Count Counter `json:"count"` @@ -31,6 +41,69 @@ type Watcher struct { Watch WatcherWatch `json:"watch"` } +func (s *Watcher) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "count": + if err := dec.Decode(&s.Count); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "execution": + if err := dec.Decode(&s.Execution); err != nil { + return err + } + + case "watch": + if err := dec.Decode(&s.Watch); err != nil { + return err + } + + } + } + return nil +} + // NewWatcher returns a Watcher. func NewWatcher() *Watcher { r := &Watcher{} diff --git a/typedapi/types/watcheraction.go b/typedapi/types/watcheraction.go old mode 100755 new mode 100644 index fffa8c09e4..66330c5837 --- a/typedapi/types/watcheraction.go +++ b/typedapi/types/watcheraction.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/actiontype" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // WatcherAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L41-L57 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L41-L57 type WatcherAction struct { ActionType *actiontype.ActionType `json:"action_type,omitempty"` Condition *WatcherCondition `json:"condition,omitempty"` @@ -44,6 +52,110 @@ type WatcherAction struct { Webhook *WebhookAction `json:"webhook,omitempty"` } +func (s *WatcherAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action_type": + if err := dec.Decode(&s.ActionType); err != nil { + return err + } + + case "condition": + if err := dec.Decode(&s.Condition); err != nil { + return err + } + + case "email": + if err := dec.Decode(&s.Email); err != nil { + return err + } + + case "foreach": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Foreach = &o + + case "index": + if err := dec.Decode(&s.Index); err != nil { + return err + } + + case "logging": + if err := dec.Decode(&s.Logging); err != nil { + return err + } + + case "max_iterations": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxIterations = &value + case float64: + f := int(v) + s.MaxIterations = &f + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "pagerduty": + if err := dec.Decode(&s.Pagerduty); err != nil { + return err + } + + case "slack": + if err := dec.Decode(&s.Slack); err != nil { + return err + } + + case "throttle_period": + if err := dec.Decode(&s.ThrottlePeriod); err != nil { + return err + } + + case "throttle_period_in_millis": + if err := dec.Decode(&s.ThrottlePeriodInMillis); err != nil { + return err + } + + case "transform": + if err := dec.Decode(&s.Transform); err != nil { + return err + } + + case "webhook": + if err := dec.Decode(&s.Webhook); err != nil { + return err + } + + } + } + return nil +} + // NewWatcherAction returns a WatcherAction. func NewWatcherAction() *WatcherAction { r := &WatcherAction{} diff --git a/typedapi/types/watcheractions.go b/typedapi/types/watcheractions.go old mode 100755 new mode 100644 index b24cf344ad..2eb56b6c91 --- a/typedapi/types/watcheractions.go +++ b/typedapi/types/watcheractions.go @@ -16,17 +16,53 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // WatcherActions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L387-L389 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L387-L389 type WatcherActions struct { Actions map[string]WatcherActionTotals `json:"actions"` } +func (s *WatcherActions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if s.Actions == nil { + s.Actions = make(map[string]WatcherActionTotals, 0) + } + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + } + } + return nil +} + // NewWatcherActions returns a WatcherActions. func NewWatcherActions() *WatcherActions { r := &WatcherActions{ diff --git a/typedapi/types/watcheractiontotals.go b/typedapi/types/watcheractiontotals.go old mode 100755 new mode 100644 index a8d2178273..c426cbdfd9 --- a/typedapi/types/watcheractiontotals.go +++ b/typedapi/types/watcheractiontotals.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // WatcherActionTotals type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L403-L406 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L403-L406 type WatcherActionTotals struct { Total Duration `json:"total"` TotalTimeInMs int64 `json:"total_time_in_ms"` } +func (s *WatcherActionTotals) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "total": + if err := dec.Decode(&s.Total); err != nil { + return err + } + + case "total_time_in_ms": + if err := dec.Decode(&s.TotalTimeInMs); err != nil { + return err + } + + } + } + return nil +} + // NewWatcherActionTotals returns a WatcherActionTotals. func NewWatcherActionTotals() *WatcherActionTotals { r := &WatcherActionTotals{} diff --git a/typedapi/types/watchercondition.go b/typedapi/types/watchercondition.go old mode 100755 new mode 100644 index d7af4af48c..c5de9b201b --- a/typedapi/types/watchercondition.go +++ b/typedapi/types/watchercondition.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/conditionop" + + "bytes" + "errors" + "io" + + "encoding/json" ) // WatcherCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Conditions.ts#L47-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Conditions.ts#L47-L59 type WatcherCondition struct { Always *AlwaysCondition `json:"always,omitempty"` ArrayCompare map[string]ArrayCompareCondition `json:"array_compare,omitempty"` @@ -35,6 +41,57 @@ type WatcherCondition struct { Script *ScriptCondition `json:"script,omitempty"` } +func (s *WatcherCondition) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "always": + if err := dec.Decode(&s.Always); err != nil { + return err + } + + case "array_compare": + if s.ArrayCompare == nil { + s.ArrayCompare = make(map[string]ArrayCompareCondition, 0) + } + if err := dec.Decode(&s.ArrayCompare); err != nil { + return err + } + + case "compare": + if s.Compare == nil { + s.Compare = make(map[string]map[conditionop.ConditionOp]FieldValue, 0) + } + if err := dec.Decode(&s.Compare); err != nil { + return err + } + + case "never": + if err := dec.Decode(&s.Never); err != nil { + return err + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewWatcherCondition returns a WatcherCondition. func NewWatcherCondition() *WatcherCondition { r := &WatcherCondition{ diff --git a/typedapi/types/watcherinput.go b/typedapi/types/watcherinput.go old mode 100755 new mode 100644 index 761925f563..671a96544d --- a/typedapi/types/watcherinput.go +++ b/typedapi/types/watcherinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -26,7 +26,7 @@ import ( // WatcherInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Input.ts#L90-L98 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Input.ts#L90-L98 type WatcherInput struct { Chain *ChainInput `json:"chain,omitempty"` Http *HttpInput `json:"http,omitempty"` diff --git a/typedapi/types/watchernodestats.go b/typedapi/types/watchernodestats.go old mode 100755 new mode 100644 index 297e0be9bd..0a1c8e980a --- a/typedapi/types/watchernodestats.go +++ b/typedapi/types/watchernodestats.go @@ -16,17 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/watcherstate" + + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" ) // WatcherNodeStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stats/types.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stats/types.ts#L33-L40 type WatcherNodeStats struct { CurrentWatches []WatchRecordStats `json:"current_watches,omitempty"` ExecutionThreadPool ExecutionThreadPool `json:"execution_thread_pool"` @@ -36,6 +44,66 @@ type WatcherNodeStats struct { WatcherState watcherstate.WatcherState `json:"watcher_state"` } +func (s *WatcherNodeStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "current_watches": + if err := dec.Decode(&s.CurrentWatches); err != nil { + return err + } + + case "execution_thread_pool": + if err := dec.Decode(&s.ExecutionThreadPool); err != nil { + return err + } + + case "node_id": + if err := dec.Decode(&s.NodeId); err != nil { + return err + } + + case "queued_watches": + if err := dec.Decode(&s.QueuedWatches); err != nil { + return err + } + + case "watch_count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.WatchCount = value + case float64: + f := int64(v) + s.WatchCount = f + } + + case "watcher_state": + if err := dec.Decode(&s.WatcherState); err != nil { + return err + } + + } + } + return nil +} + // NewWatcherNodeStats returns a WatcherNodeStats. func NewWatcherNodeStats() *WatcherNodeStats { r := &WatcherNodeStats{} diff --git a/typedapi/types/watcherstatusactions.go b/typedapi/types/watcherstatusactions.go old mode 100755 new mode 100644 index cabb25f291..8eafeeae21 --- a/typedapi/types/watcherstatusactions.go +++ b/typedapi/types/watcherstatusactions.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // WatcherStatusActions type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Action.ts#L59-L59 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Action.ts#L59-L59 type WatcherStatusActions map[string]ActionStatus diff --git a/typedapi/types/watcherwatch.go b/typedapi/types/watcherwatch.go old mode 100755 new mode 100644 index ce91732098..517654c359 --- a/typedapi/types/watcherwatch.go +++ b/typedapi/types/watcherwatch.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // WatcherWatch type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L391-L396 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L391-L396 type WatcherWatch struct { Action map[string]Counter `json:"action,omitempty"` Condition map[string]Counter `json:"condition,omitempty"` @@ -30,6 +38,55 @@ type WatcherWatch struct { Trigger WatcherWatchTrigger `json:"trigger"` } +func (s *WatcherWatch) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "action": + if s.Action == nil { + s.Action = make(map[string]Counter, 0) + } + if err := dec.Decode(&s.Action); err != nil { + return err + } + + case "condition": + if s.Condition == nil { + s.Condition = make(map[string]Counter, 0) + } + if err := dec.Decode(&s.Condition); err != nil { + return err + } + + case "input": + if s.Input == nil { + s.Input = make(map[string]Counter, 0) + } + if err := dec.Decode(&s.Input); err != nil { + return err + } + + case "trigger": + if err := dec.Decode(&s.Trigger); err != nil { + return err + } + + } + } + return nil +} + // NewWatcherWatch returns a WatcherWatch. func NewWatcherWatch() *WatcherWatch { r := &WatcherWatch{ diff --git a/typedapi/types/watcherwatchtrigger.go b/typedapi/types/watcherwatchtrigger.go old mode 100755 new mode 100644 index 1ac537ff28..d6cdb3172f --- a/typedapi/types/watcherwatchtrigger.go +++ b/typedapi/types/watcherwatchtrigger.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // WatcherWatchTrigger type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L398-L401 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L398-L401 type WatcherWatchTrigger struct { All_ Counter `json:"_all"` Schedule *WatcherWatchTriggerSchedule `json:"schedule,omitempty"` diff --git a/typedapi/types/watcherwatchtriggerschedule.go b/typedapi/types/watcherwatchtriggerschedule.go old mode 100755 new mode 100644 index ea42431a5e..a3e6b732ef --- a/typedapi/types/watcherwatchtriggerschedule.go +++ b/typedapi/types/watcherwatchtriggerschedule.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WatcherWatchTriggerSchedule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L457-L460 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L457-L460 type WatcherWatchTriggerSchedule struct { Active int64 `json:"active"` All_ Counter `json:"_all"` @@ -30,6 +40,66 @@ type WatcherWatchTriggerSchedule struct { Total int64 `json:"total"` } +func (s *WatcherWatchTriggerSchedule) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "active": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Active = value + case float64: + f := int64(v) + s.Active = f + } + + case "_all": + if err := dec.Decode(&s.All_); err != nil { + return err + } + + case "cron": + if err := dec.Decode(&s.Cron); err != nil { + return err + } + + case "total": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Total = value + case float64: + f := int64(v) + s.Total = f + } + + } + } + return nil +} + // NewWatcherWatchTriggerSchedule returns a WatcherWatchTriggerSchedule. func NewWatcherWatchTriggerSchedule() *WatcherWatchTriggerSchedule { r := &WatcherWatchTriggerSchedule{} diff --git a/typedapi/types/watchrecord.go b/typedapi/types/watchrecord.go old mode 100755 new mode 100644 index 9bb0b8cacc..0c93f0e0d9 --- a/typedapi/types/watchrecord.go +++ b/typedapi/types/watchrecord.go @@ -16,24 +16,28 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/executionstatus" + + "bytes" + "errors" + "io" + + "encoding/json" ) // WatchRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/execute_watch/types.ts#L27-L39 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/execute_watch/types.ts#L27-L39 type WatchRecord struct { Condition WatcherCondition `json:"condition"` Input WatcherInput `json:"input"` Messages []string `json:"messages"` - Metadata map[string]json.RawMessage `json:"metadata,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` Node string `json:"node"` Result ExecutionResult `json:"result"` State executionstatus.ExecutionStatus `json:"state"` @@ -43,6 +47,84 @@ type WatchRecord struct { WatchId string `json:"watch_id"` } +func (s *WatchRecord) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "condition": + if err := dec.Decode(&s.Condition); err != nil { + return err + } + + case "input": + if err := dec.Decode(&s.Input); err != nil { + return err + } + + case "messages": + if err := dec.Decode(&s.Messages); err != nil { + return err + } + + case "metadata": + if err := dec.Decode(&s.Metadata); err != nil { + return err + } + + case "node": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Node = o + + case "result": + if err := dec.Decode(&s.Result); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "status": + if err := dec.Decode(&s.Status); err != nil { + return err + } + + case "trigger_event": + if err := dec.Decode(&s.TriggerEvent); err != nil { + return err + } + + case "user": + if err := dec.Decode(&s.User); err != nil { + return err + } + + case "watch_id": + if err := dec.Decode(&s.WatchId); err != nil { + return err + } + + } + } + return nil +} + // NewWatchRecord returns a WatchRecord. func NewWatchRecord() *WatchRecord { r := &WatchRecord{} diff --git a/typedapi/types/watchrecordqueuedstats.go b/typedapi/types/watchrecordqueuedstats.go old mode 100755 new mode 100644 index a7caf2f4e8..178d72e89d --- a/typedapi/types/watchrecordqueuedstats.go +++ b/typedapi/types/watchrecordqueuedstats.go @@ -16,17 +16,50 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // WatchRecordQueuedStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stats/types.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stats/types.ts#L50-L52 type WatchRecordQueuedStats struct { ExecutionTime DateTime `json:"execution_time"` } +func (s *WatchRecordQueuedStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "execution_time": + if err := dec.Decode(&s.ExecutionTime); err != nil { + return err + } + + } + } + return nil +} + // NewWatchRecordQueuedStats returns a WatchRecordQueuedStats. func NewWatchRecordQueuedStats() *WatchRecordQueuedStats { r := &WatchRecordQueuedStats{} diff --git a/typedapi/types/watchrecordstats.go b/typedapi/types/watchrecordstats.go old mode 100755 new mode 100644 index 8cf2a3db4d..c54a980d3e --- a/typedapi/types/watchrecordstats.go +++ b/typedapi/types/watchrecordstats.go @@ -16,17 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/executionphase" + + "bytes" + "errors" + "io" + + "encoding/json" ) // WatchRecordStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stats/types.ts#L54-L60 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stats/types.ts#L54-L60 type WatchRecordStats struct { ExecutedActions []string `json:"executed_actions,omitempty"` ExecutionPhase executionphase.ExecutionPhase `json:"execution_phase"` @@ -36,6 +42,56 @@ type WatchRecordStats struct { WatchRecordId string `json:"watch_record_id"` } +func (s *WatchRecordStats) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "executed_actions": + if err := dec.Decode(&s.ExecutedActions); err != nil { + return err + } + + case "execution_phase": + if err := dec.Decode(&s.ExecutionPhase); err != nil { + return err + } + + case "execution_time": + if err := dec.Decode(&s.ExecutionTime); err != nil { + return err + } + + case "triggered_time": + if err := dec.Decode(&s.TriggeredTime); err != nil { + return err + } + + case "watch_id": + if err := dec.Decode(&s.WatchId); err != nil { + return err + } + + case "watch_record_id": + if err := dec.Decode(&s.WatchRecordId); err != nil { + return err + } + + } + } + return nil +} + // NewWatchRecordStats returns a WatchRecordStats. func NewWatchRecordStats() *WatchRecordStats { r := &WatchRecordStats{} diff --git a/typedapi/types/watchstatus.go b/typedapi/types/watchstatus.go old mode 100755 new mode 100644 index 3e589754a5..8b9b498ad5 --- a/typedapi/types/watchstatus.go +++ b/typedapi/types/watchstatus.go @@ -16,13 +16,21 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // WatchStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Watch.ts#L49-L56 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Watch.ts#L49-L56 type WatchStatus struct { Actions WatcherStatusActions `json:"actions"` ExecutionState *string `json:"execution_state,omitempty"` @@ -32,6 +40,59 @@ type WatchStatus struct { Version int64 `json:"version"` } +func (s *WatchStatus) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "actions": + if err := dec.Decode(&s.Actions); err != nil { + return err + } + + case "execution_state": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ExecutionState = &o + + case "last_checked": + if err := dec.Decode(&s.LastChecked); err != nil { + return err + } + + case "last_met_condition": + if err := dec.Decode(&s.LastMetCondition); err != nil { + return err + } + + case "state": + if err := dec.Decode(&s.State); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewWatchStatus returns a WatchStatus. func NewWatchStatus() *WatchStatus { r := &WatchStatus{} diff --git a/typedapi/types/webhookaction.go b/typedapi/types/webhookaction.go old mode 100755 new mode 100644 index 15ae018696..b298b8a236 --- a/typedapi/types/webhookaction.go +++ b/typedapi/types/webhookaction.go @@ -16,18 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/connectionscheme" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/httpinputmethod" + + "bytes" + "errors" + "io" + + "encoding/json" ) // WebhookAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L293-L293 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L293-L293 type WebhookAction struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` @@ -44,6 +50,106 @@ type WebhookAction struct { Url *string `json:"url,omitempty"` } +func (s *WebhookAction) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "auth": + if err := dec.Decode(&s.Auth); err != nil { + return err + } + + case "body": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Body = &o + + case "connection_timeout": + if err := dec.Decode(&s.ConnectionTimeout); err != nil { + return err + } + + case "headers": + if s.Headers == nil { + s.Headers = make(map[string]string, 0) + } + if err := dec.Decode(&s.Headers); err != nil { + return err + } + + case "host": + if err := dec.Decode(&s.Host); err != nil { + return err + } + + case "method": + if err := dec.Decode(&s.Method); err != nil { + return err + } + + case "params": + if s.Params == nil { + s.Params = make(map[string]string, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return err + } + + case "path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Path = &o + + case "port": + if err := dec.Decode(&s.Port); err != nil { + return err + } + + case "proxy": + if err := dec.Decode(&s.Proxy); err != nil { + return err + } + + case "read_timeout": + if err := dec.Decode(&s.ReadTimeout); err != nil { + return err + } + + case "scheme": + if err := dec.Decode(&s.Scheme); err != nil { + return err + } + + case "url": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Url = &o + + } + } + return nil +} + // NewWebhookAction returns a WebhookAction. func NewWebhookAction() *WebhookAction { r := &WebhookAction{ diff --git a/typedapi/types/webhookresult.go b/typedapi/types/webhookresult.go old mode 100755 new mode 100644 index 794c0c705d..64bac25b13 --- a/typedapi/types/webhookresult.go +++ b/typedapi/types/webhookresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // WebhookResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/_types/Actions.ts#L295-L298 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/_types/Actions.ts#L295-L298 type WebhookResult struct { Request HttpInputRequestResult `json:"request"` Response *HttpInputResponseResult `json:"response,omitempty"` diff --git a/typedapi/types/weightedaverageaggregation.go b/typedapi/types/weightedaverageaggregation.go old mode 100755 new mode 100644 index 253abc23c4..30b9829a61 --- a/typedapi/types/weightedaverageaggregation.go +++ b/typedapi/types/weightedaverageaggregation.go @@ -16,26 +16,86 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( - "encoding/json" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/valuetype" + + "bytes" + "errors" + "io" + + "encoding/json" ) // WeightedAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L211-L216 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L211-L216 type WeightedAverageAggregation struct { - Format *string `json:"format,omitempty"` - Meta map[string]json.RawMessage `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Value *WeightedAverageValue `json:"value,omitempty"` - ValueType *valuetype.ValueType `json:"value_type,omitempty"` - Weight *WeightedAverageValue `json:"weight,omitempty"` + Format *string `json:"format,omitempty"` + Meta Metadata `json:"meta,omitempty"` + Name *string `json:"name,omitempty"` + Value *WeightedAverageValue `json:"value,omitempty"` + ValueType *valuetype.ValueType `json:"value_type,omitempty"` + Weight *WeightedAverageValue `json:"weight,omitempty"` +} + +func (s *WeightedAverageAggregation) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Format = &o + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Name = &o + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_type": + if err := dec.Decode(&s.ValueType); err != nil { + return err + } + + case "weight": + if err := dec.Decode(&s.Weight); err != nil { + return err + } + + } + } + return nil } // NewWeightedAverageAggregation returns a WeightedAverageAggregation. diff --git a/typedapi/types/weightedaveragevalue.go b/typedapi/types/weightedaveragevalue.go old mode 100755 new mode 100644 index c5e2246e3d..2fe363dff2 --- a/typedapi/types/weightedaveragevalue.go +++ b/typedapi/types/weightedaveragevalue.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WeightedAverageValue type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/metric.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/metric.ts#L218-L222 type WeightedAverageValue struct { Field *string `json:"field,omitempty"` Missing *Float64 `json:"missing,omitempty"` Script Script `json:"script,omitempty"` } +func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return err + } + + case "missing": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Missing = &f + case float64: + f := Float64(v) + s.Missing = &f + } + + case "script": + if err := dec.Decode(&s.Script); err != nil { + return err + } + + } + } + return nil +} + // NewWeightedAverageValue returns a WeightedAverageValue. func NewWeightedAverageValue() *WeightedAverageValue { r := &WeightedAverageValue{} diff --git a/typedapi/types/weightedavgaggregate.go b/typedapi/types/weightedavgaggregate.go old mode 100755 new mode 100644 index 9c9b4dabf0..15f4ea6869 --- a/typedapi/types/weightedavgaggregate.go +++ b/typedapi/types/weightedavgaggregate.go @@ -16,19 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types import ( + "bytes" + "errors" + "io" + "encoding/json" ) // WeightedAvgAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/aggregations/Aggregate.ts#L211-L215 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/aggregations/Aggregate.ts#L212-L216 type WeightedAvgAggregate struct { - Meta map[string]json.RawMessage `json:"meta,omitempty"` + Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. 
@@ -36,6 +40,44 @@ type WeightedAvgAggregate struct { ValueAsString *string `json:"value_as_string,omitempty"` } +func (s *WeightedAvgAggregate) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "meta": + if err := dec.Decode(&s.Meta); err != nil { + return err + } + + case "value": + if err := dec.Decode(&s.Value); err != nil { + return err + } + + case "value_as_string": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ValueAsString = &o + + } + } + return nil +} + // NewWeightedAvgAggregate returns a WeightedAvgAggregate. func NewWeightedAvgAggregate() *WeightedAvgAggregate { r := &WeightedAvgAggregate{} diff --git a/typedapi/types/weights.go b/typedapi/types/weights.go old mode 100755 new mode 100644 index 536329bc87..40bb840d0e --- a/typedapi/types/weights.go +++ b/typedapi/types/weights.go @@ -16,17 +16,63 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // Weights type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/put_trained_model/types.ts#L108-L110 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/put_trained_model/types.ts#L108-L110 type Weights struct { Weights Float64 `json:"weights"` } +func (s *Weights) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "weights": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + f := Float64(value) + s.Weights = f + case float64: + f := Float64(v) + s.Weights = f + } + + } + } + return nil +} + // NewWeights returns a Weights. func NewWeights() *Weights { r := &Weights{} diff --git a/typedapi/types/whitespaceanalyzer.go b/typedapi/types/whitespaceanalyzer.go old mode 100755 new mode 100644 index 1581915c22..1cdb1a3579 --- a/typedapi/types/whitespaceanalyzer.go +++ b/typedapi/types/whitespaceanalyzer.go @@ -16,18 +16,56 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "encoding/json" +) + // WhitespaceAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/analyzers.ts#L108-L111 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/analyzers.ts#L108-L111 type WhitespaceAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *WhitespaceAnalyzer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewWhitespaceAnalyzer returns a WhitespaceAnalyzer. func NewWhitespaceAnalyzer() *WhitespaceAnalyzer { r := &WhitespaceAnalyzer{} diff --git a/typedapi/types/whitespacetokenizer.go b/typedapi/types/whitespacetokenizer.go old mode 100755 new mode 100644 index 3a724c8d73..d3bf977123 --- a/typedapi/types/whitespacetokenizer.go +++ b/typedapi/types/whitespacetokenizer.go @@ -16,19 +16,75 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WhitespaceTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/tokenizers.ts#L114-L117 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/tokenizers.ts#L114-L117 type WhitespaceTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` } +func (s *WhitespaceTokenizer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "max_token_length": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.MaxTokenLength = &value + case float64: + f := int(v) + s.MaxTokenLength = &f + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewWhitespaceTokenizer returns a WhitespaceTokenizer. func NewWhitespaceTokenizer() *WhitespaceTokenizer { r := &WhitespaceTokenizer{} diff --git a/typedapi/types/wildcardproperty.go b/typedapi/types/wildcardproperty.go old mode 100755 new mode 100644 index 8b7ce5fe55..72be4d1783 --- a/typedapi/types/wildcardproperty.go +++ b/typedapi/types/wildcardproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
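WhitespaceTokenizer gets the same leniency for its numeric setting: max_token_length may arrive as a JSON number or a quoted string, with strconv.Atoi handling the latter. A short sketch under the same assumptions, with an illustrative payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var tok types.WhitespaceTokenizer
	// max_token_length is quoted in the payload but still lands in the *int field.
	if err := json.Unmarshal([]byte(`{"type":"whitespace","max_token_length":"255"}`), &tok); err != nil {
		panic(err)
	}
	fmt.Println(*tok.MaxTokenLength) // 255
}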
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types @@ -27,12 +27,14 @@ import ( "errors" "io" + "strconv" + "encoding/json" ) // WildcardProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/mapping/core.ts#L269-L273 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/mapping/core.ts#L269-L273 type WildcardProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -49,6 +51,7 @@ type WildcardProperty struct { } func (s *WildcardProperty) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) for { @@ -63,13 +66,33 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { switch t { case "copy_to": - if err := dec.Decode(&s.CopyTo); err != nil { - return err + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return err + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return err + } } case "doc_values": - if err := dec.Decode(&s.DocValues); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.DocValues = &value + case bool: + s.DocValues = &v } case "dynamic": @@ -78,6 +101,9 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { } case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -365,28 +391,48 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { } s.Fields[key] = oo default: - if err := dec.Decode(&s.Fields); err != nil { + if err := localDec.Decode(&s.Fields); err != nil { return err } } } case "ignore_above": - if err := dec.Decode(&s.IgnoreAbove); err != nil { - return err + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f } case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } if err := dec.Decode(&s.Meta); err != nil { return err } case "null_value": - if err := dec.Decode(&s.NullValue); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.NullValue = &o case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { @@ -674,20 +720,32 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { } s.Properties[key] = oo default: - if err := dec.Decode(&s.Properties); err != nil { + if err := localDec.Decode(&s.Properties); err != nil { return err } } } case "similarity": - if err := dec.Decode(&s.Similarity); err != nil { + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { return err } + o := string(tmp) + s.Similarity = &o case "store": - if err := 
dec.Decode(&s.Store); err != nil { - return err + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Store = &value + case bool: + s.Store = &v } case "type": diff --git a/typedapi/types/wildcardquery.go b/typedapi/types/wildcardquery.go old mode 100755 new mode 100644 index 3d4a5d9d8e..b8677c1e6c --- a/typedapi/types/wildcardquery.go +++ b/typedapi/types/wildcardquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WildcardQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/term.ts#L149-L162 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/term.ts#L149-L162 type WildcardQuery struct { Boost *float32 `json:"boost,omitempty"` // CaseInsensitive Allows case insensitive matching of the pattern with the indexed field values @@ -40,6 +50,90 @@ type WildcardQuery struct { Wildcard *string `json:"wildcard,omitempty"` } +func (s *WildcardQuery) UnmarshalJSON(data []byte) error { + + if !bytes.HasPrefix(data, []byte(`{`)) { + err := json.NewDecoder(bytes.NewReader(data)).Decode(&s.Value) + return err + } + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "case_insensitive": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CaseInsensitive = &value + case bool: + s.CaseInsensitive = &v + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + case "rewrite": + if err := dec.Decode(&s.Rewrite); err != nil { + return err + } + + case "value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Value = &o + + case "wildcard": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Wildcard = &o + + } + } + return nil +} + // NewWildcardQuery returns a WildcardQuery. func NewWildcardQuery() *WildcardQuery { r := &WildcardQuery{} diff --git a/typedapi/types/wktgeobounds.go b/typedapi/types/wktgeobounds.go old mode 100755 new mode 100644 index 85ceb31b1e..730327fad5 --- a/typedapi/types/wktgeobounds.go +++ b/typedapi/types/wktgeobounds.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
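Two behaviours in the hunks above are easy to miss: WildcardProperty now accepts copy_to as either a single string or an array, and WildcardQuery gains a shorthand in which a bare JSON string is treated as the pattern itself (the bytes.HasPrefix check before the token loop). A sketch with invented payloads, assuming the generated types package:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// copy_to given as a single string is normalised into the slice field;
	// ignore_above tolerates a quoted number.
	var p types.WildcardProperty
	if err := json.Unmarshal([]byte(`{"copy_to":"all_content","ignore_above":"256"}`), &p); err != nil {
		panic(err)
	}
	fmt.Println(p.CopyTo, *p.IgnoreAbove) // [all_content] 256

	// Object form of a wildcard query; boost tolerates a quoted number too.
	var q types.WildcardQuery
	if err := json.Unmarshal([]byte(`{"value":"ki*y","boost":"1.2"}`), &q); err != nil {
		panic(err)
	}
	fmt.Println(*q.Boost) // 1.2

	// Shorthand form: a bare pattern string is routed straight into Value.
	var short types.WildcardQuery
	if err := json.Unmarshal([]byte(`"ki*y"`), &short); err != nil {
		panic(err)
	}
	fmt.Println(*short.Value) // ki*y
}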
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // WktGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/Geo.ts#L134-L136 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/Geo.ts#L134-L136 type WktGeoBounds struct { Wkt string `json:"wkt"` } diff --git a/typedapi/types/worddelimitergraphtokenfilter.go b/typedapi/types/worddelimitergraphtokenfilter.go old mode 100755 new mode 100644 index bc718a549f..68fa3f4df4 --- a/typedapi/types/worddelimitergraphtokenfilter.go +++ b/typedapi/types/worddelimitergraphtokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WordDelimiterGraphTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L148-L165 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L149-L166 type WordDelimiterGraphTokenFilter struct { AdjustOffsets *bool `json:"adjust_offsets,omitempty"` CatenateAll *bool `json:"catenate_all,omitempty"` @@ -43,6 +53,216 @@ type WordDelimiterGraphTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "adjust_offsets": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.AdjustOffsets = &value + case bool: + s.AdjustOffsets = &v + } + + case "catenate_all": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CatenateAll = &value + case bool: + s.CatenateAll = &v + } + + case "catenate_numbers": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CatenateNumbers = &value + case bool: + s.CatenateNumbers = &v + } + + case "catenate_words": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CatenateWords = &value + case bool: + s.CatenateWords = &v + } + + case "generate_number_parts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.GenerateNumberParts = &value + case bool: + s.GenerateNumberParts = &v + } + + case "generate_word_parts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := 
strconv.ParseBool(v) + if err != nil { + return err + } + s.GenerateWordParts = &value + case bool: + s.GenerateWordParts = &v + } + + case "ignore_keywords": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.IgnoreKeywords = &value + case bool: + s.IgnoreKeywords = &v + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "protected_words": + if err := dec.Decode(&s.ProtectedWords); err != nil { + return err + } + + case "protected_words_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ProtectedWordsPath = &o + + case "split_on_case_change": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SplitOnCaseChange = &value + case bool: + s.SplitOnCaseChange = &v + } + + case "split_on_numerics": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SplitOnNumerics = &value + case bool: + s.SplitOnNumerics = &v + } + + case "stem_english_possessive": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StemEnglishPossessive = &value + case bool: + s.StemEnglishPossessive = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "type_table": + if err := dec.Decode(&s.TypeTable); err != nil { + return err + } + + case "type_table_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TypeTablePath = &o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewWordDelimiterGraphTokenFilter returns a WordDelimiterGraphTokenFilter. func NewWordDelimiterGraphTokenFilter() *WordDelimiterGraphTokenFilter { r := &WordDelimiterGraphTokenFilter{} diff --git a/typedapi/types/worddelimitertokenfilter.go b/typedapi/types/worddelimitertokenfilter.go old mode 100755 new mode 100644 index efd2cc9b98..30a7bf360a --- a/typedapi/types/worddelimitertokenfilter.go +++ b/typedapi/types/worddelimitertokenfilter.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WordDelimiterTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/analysis/token_filters.ts#L131-L146 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/analysis/token_filters.ts#L132-L147 type WordDelimiterTokenFilter struct { CatenateAll *bool `json:"catenate_all,omitempty"` CatenateNumbers *bool `json:"catenate_numbers,omitempty"` @@ -41,6 +51,188 @@ type WordDelimiterTokenFilter struct { Version *string `json:"version,omitempty"` } +func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "catenate_all": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CatenateAll = &value + case bool: + s.CatenateAll = &v + } + + case "catenate_numbers": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CatenateNumbers = &value + case bool: + s.CatenateNumbers = &v + } + + case "catenate_words": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.CatenateWords = &value + case bool: + s.CatenateWords = &v + } + + case "generate_number_parts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.GenerateNumberParts = &value + case bool: + s.GenerateNumberParts = &v + } + + case "generate_word_parts": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.GenerateWordParts = &value + case bool: + s.GenerateWordParts = &v + } + + case "preserve_original": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.PreserveOriginal = &value + case bool: + s.PreserveOriginal = &v + } + + case "protected_words": + if err := dec.Decode(&s.ProtectedWords); err != nil { + return err + } + + case "protected_words_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ProtectedWordsPath = &o + + case "split_on_case_change": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SplitOnCaseChange = &value + case bool: + s.SplitOnCaseChange = &v + } + + case "split_on_numerics": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.SplitOnNumerics = &value + case bool: + s.SplitOnNumerics = &v + } + + case "stem_english_possessive": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.StemEnglishPossessive = &value + case bool: + s.StemEnglishPossessive = &v + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return err + } + + case "type_table": + if err := 
dec.Decode(&s.TypeTable); err != nil { + return err + } + + case "type_table_path": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.TypeTablePath = &o + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return err + } + + } + } + return nil +} + // NewWordDelimiterTokenFilter returns a WordDelimiterTokenFilter. func NewWordDelimiterTokenFilter() *WordDelimiterTokenFilter { r := &WordDelimiterTokenFilter{} diff --git a/typedapi/types/wrapperquery.go b/typedapi/types/wrapperquery.go old mode 100755 new mode 100644 index 9db6498ee5..064924b74d --- a/typedapi/types/wrapperquery.go +++ b/typedapi/types/wrapperquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // WrapperQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/_types/query_dsl/abstractions.ts#L197-L200 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/_types/query_dsl/abstractions.ts#L197-L200 type WrapperQuery struct { Boost *float32 `json:"boost,omitempty"` // Query A base64 encoded query. The binary data format can be any of JSON, YAML, CBOR @@ -31,6 +41,58 @@ type WrapperQuery struct { QueryName_ *string `json:"_name,omitempty"` } +func (s *WrapperQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return err + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.QueryName_ = &o + + } + } + return nil +} + // NewWrapperQuery returns a WrapperQuery. func NewWrapperQuery() *WrapperQuery { r := &WrapperQuery{} diff --git a/typedapi/types/xpackdatafeed.go b/typedapi/types/xpackdatafeed.go old mode 100755 new mode 100644 index c37ccf2cb6..dc46edf6a5 --- a/typedapi/types/xpackdatafeed.go +++ b/typedapi/types/xpackdatafeed.go @@ -16,17 +16,62 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // XpackDatafeed type. 
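The word-delimiter filters get the same treatment for their many boolean flags: each is decoded into an interface{} first, so both native booleans and their string forms are accepted. A minimal sketch with an illustrative payload:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var f types.WordDelimiterTokenFilter
	payload := []byte(`{"type":"word_delimiter","catenate_all":"true","preserve_original":false}`)
	if err := json.Unmarshal(payload, &f); err != nil {
		panic(err)
	}
	// "true" (string) and false (bool) both decode into *bool fields.
	fmt.Println(*f.CatenateAll, *f.PreserveOriginal) // true false
}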
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L77-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L77-L79 type XpackDatafeed struct { Count int64 `json:"count"` } +func (s *XpackDatafeed) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + s.Count = value + case float64: + f := int64(v) + s.Count = f + } + + } + } + return nil +} + // NewXpackDatafeed returns a XpackDatafeed. func NewXpackDatafeed() *XpackDatafeed { r := &XpackDatafeed{} diff --git a/typedapi/types/xpackfeature.go b/typedapi/types/xpackfeature.go old mode 100755 new mode 100644 index 47668ef41d..916efe6380 --- a/typedapi/types/xpackfeature.go +++ b/typedapi/types/xpackfeature.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // XpackFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/info/types.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/info/types.ts#L74-L79 type XpackFeature struct { Available bool `json:"available"` Description *string `json:"description,omitempty"` @@ -30,6 +40,67 @@ type XpackFeature struct { NativeCodeInfo *NativeCodeInformation `json:"native_code_info,omitempty"` } +func (s *XpackFeature) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "description": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.Description = &o + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "native_code_info": + if err := dec.Decode(&s.NativeCodeInfo); err != nil { + return err + } + + } + } + return nil +} + // NewXpackFeature returns a XpackFeature. func NewXpackFeature() *XpackFeature { r := &XpackFeature{} diff --git a/typedapi/types/xpackfeatures.go b/typedapi/types/xpackfeatures.go old mode 100755 new mode 100644 index 289edad760..6ca1cbba7a --- a/typedapi/types/xpackfeatures.go +++ b/typedapi/types/xpackfeatures.go @@ -16,13 +16,13 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types // XpackFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/info/types.ts#L42-L72 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/info/types.ts#L42-L72 type XpackFeatures struct { AggregateMetric XpackFeature `json:"aggregate_metric"` Analytics XpackFeature `json:"analytics"` diff --git a/typedapi/types/xpackquery.go b/typedapi/types/xpackquery.go old mode 100755 new mode 100644 index 8a393a6335..4ef80bd15c --- a/typedapi/types/xpackquery.go +++ b/typedapi/types/xpackquery.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // XpackQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L253-L258 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L253-L258 type XpackQuery struct { Count *int `json:"count,omitempty"` Failed *int `json:"failed,omitempty"` @@ -30,6 +40,90 @@ type XpackQuery struct { Total *int `json:"total,omitempty"` } +func (s *XpackQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "count": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Count = &value + case float64: + f := int(v) + s.Count = &f + } + + case "failed": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Failed = &value + case float64: + f := int(v) + s.Failed = &f + } + + case "paging": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Paging = &value + case float64: + f := int(v) + s.Paging = &f + } + + case "total": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Total = &value + case float64: + f := int(v) + s.Total = &f + } + + } + } + return nil +} + // NewXpackQuery returns a XpackQuery. func NewXpackQuery() *XpackQuery { r := &XpackQuery{} diff --git a/typedapi/types/xpackrealm.go b/typedapi/types/xpackrealm.go old mode 100755 new mode 100644 index a924e8349f..7025d81718 --- a/typedapi/types/xpackrealm.go +++ b/typedapi/types/xpackrealm.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
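The xpack usage counters follow the same convention, and because the fields are pointers, counters missing from the payload stay nil rather than defaulting to zero. Illustrative sketch under the same assumptions:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var q types.XpackQuery
	// "count" arrives as a number, "total" as a string; "paging" and "failed" are omitted.
	if err := json.Unmarshal([]byte(`{"count":10,"total":"12"}`), &q); err != nil {
		panic(err)
	}
	fmt.Println(*q.Count, *q.Total, q.Paging == nil) // 10 12 true
}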
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // XpackRealm type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L408-L417 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L408-L417 type XpackRealm struct { Available bool `json:"available"` Cache []RealmCache `json:"cache,omitempty"` @@ -36,6 +46,94 @@ type XpackRealm struct { Size []int64 `json:"size,omitempty"` } +func (s *XpackRealm) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "cache": + if err := dec.Decode(&s.Cache); err != nil { + return err + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "has_authorization_realms": + if err := dec.Decode(&s.HasAuthorizationRealms); err != nil { + return err + } + + case "has_default_username_pattern": + if err := dec.Decode(&s.HasDefaultUsernamePattern); err != nil { + return err + } + + case "has_truststore": + if err := dec.Decode(&s.HasTruststore); err != nil { + return err + } + + case "is_authentication_delegated": + if err := dec.Decode(&s.IsAuthenticationDelegated); err != nil { + return err + } + + case "name": + if err := dec.Decode(&s.Name); err != nil { + return err + } + + case "order": + if err := dec.Decode(&s.Order); err != nil { + return err + } + + case "size": + if err := dec.Decode(&s.Size); err != nil { + return err + } + + } + } + return nil +} + // NewXpackRealm returns a XpackRealm. func NewXpackRealm() *XpackRealm { r := &XpackRealm{} diff --git a/typedapi/types/xpackrolemapping.go b/typedapi/types/xpackrolemapping.go old mode 100755 new mode 100644 index 7908c0611d..bb946fb193 --- a/typedapi/types/xpackrolemapping.go +++ b/typedapi/types/xpackrolemapping.go @@ -16,18 +16,80 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // XpackRoleMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L264-L267 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L264-L267 type XpackRoleMapping struct { Enabled int `json:"enabled"` Size int `json:"size"` } +func (s *XpackRoleMapping) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "enabled": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Enabled = value + case float64: + f := int(v) + s.Enabled = f + } + + case "size": + + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return err + } + s.Size = value + case float64: + f := int(v) + s.Size = f + } + + } + } + return nil +} + // NewXpackRoleMapping returns a XpackRoleMapping. func NewXpackRoleMapping() *XpackRoleMapping { r := &XpackRoleMapping{} diff --git a/typedapi/types/xpackruntimefieldtypes.go b/typedapi/types/xpackruntimefieldtypes.go old mode 100755 new mode 100644 index 9f7f230bd1..5584184c7f --- a/typedapi/types/xpackruntimefieldtypes.go +++ b/typedapi/types/xpackruntimefieldtypes.go @@ -16,19 +16,82 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // XpackRuntimeFieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/types.ts#L269-L271 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/types.ts#L269-L271 type XpackRuntimeFieldTypes struct { Available bool `json:"available"` Enabled bool `json:"enabled"` FieldTypes []RuntimeFieldsType `json:"field_types"` } +func (s *XpackRuntimeFieldTypes) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "available": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Available = value + case bool: + s.Available = v + } + + case "enabled": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.Enabled = value + case bool: + s.Enabled = v + } + + case "field_types": + if err := dec.Decode(&s.FieldTypes); err != nil { + return err + } + + } + } + return nil +} + // NewXpackRuntimeFieldTypes returns a XpackRuntimeFieldTypes. 
func NewXpackRuntimeFieldTypes() *XpackRuntimeFieldTypes { r := &XpackRuntimeFieldTypes{} diff --git a/typedapi/types/zeroshotclassificationinferenceoptions.go b/typedapi/types/zeroshotclassificationinferenceoptions.go old mode 100755 new mode 100644 index 1051b9e215..2ad01c663a --- a/typedapi/types/zeroshotclassificationinferenceoptions.go +++ b/typedapi/types/zeroshotclassificationinferenceoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ZeroShotClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L186-L207 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L186-L207 type ZeroShotClassificationInferenceOptions struct { // ClassificationLabels The zero shot classification labels indicating entailment, neutral, and // contradiction @@ -41,6 +51,71 @@ type ZeroShotClassificationInferenceOptions struct { Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` } +func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "classification_labels": + if err := dec.Decode(&s.ClassificationLabels); err != nil { + return err + } + + case "hypothesis_template": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.HypothesisTemplate = &o + + case "labels": + if err := dec.Decode(&s.Labels); err != nil { + return err + } + + case "multi_label": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MultiLabel = &value + case bool: + s.MultiLabel = &v + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewZeroShotClassificationInferenceOptions returns a ZeroShotClassificationInferenceOptions. func NewZeroShotClassificationInferenceOptions() *ZeroShotClassificationInferenceOptions { r := &ZeroShotClassificationInferenceOptions{} diff --git a/typedapi/types/zeroshotclassificationinferenceupdateoptions.go b/typedapi/types/zeroshotclassificationinferenceupdateoptions.go old mode 100755 new mode 100644 index d8e02c4775..e6a7577711 --- a/typedapi/types/zeroshotclassificationinferenceupdateoptions.go +++ b/typedapi/types/zeroshotclassificationinferenceupdateoptions.go @@ -16,13 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package types +import ( + "bytes" + "errors" + "io" + + "strconv" + + "encoding/json" +) + // ZeroShotClassificationInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/ml/_types/inference.ts#L339-L348 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/ml/_types/inference.ts#L339-L348 type ZeroShotClassificationInferenceUpdateOptions struct { // Labels The labels to predict. Labels []string `json:"labels"` @@ -36,6 +46,58 @@ type ZeroShotClassificationInferenceUpdateOptions struct { Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` } +func (s *ZeroShotClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "labels": + if err := dec.Decode(&s.Labels); err != nil { + return err + } + + case "multi_label": + var tmp interface{} + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return err + } + s.MultiLabel = &value + case bool: + s.MultiLabel = &v + } + + case "results_field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return err + } + o := string(tmp) + s.ResultsField = &o + + case "tokenization": + if err := dec.Decode(&s.Tokenization); err != nil { + return err + } + + } + } + return nil +} + // NewZeroShotClassificationInferenceUpdateOptions returns a ZeroShotClassificationInferenceUpdateOptions. func NewZeroShotClassificationInferenceUpdateOptions() *ZeroShotClassificationInferenceUpdateOptions { r := &ZeroShotClassificationInferenceUpdateOptions{} diff --git a/typedapi/watcher/ackwatch/ack_watch.go b/typedapi/watcher/ackwatch/ack_watch.go old mode 100755 new mode 100644 index 659dc04e5e..a9197b27e4 --- a/typedapi/watcher/ackwatch/ack_watch.go +++ b/typedapi/watcher/ackwatch/ack_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Acknowledges a watch, manually throttling the execution of the watch's // actions. @@ -192,7 +192,6 @@ func (r AckWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/ackwatch/response.go b/typedapi/watcher/ackwatch/response.go old mode 100755 new mode 100644 index a874c8098f..5739ea8389 --- a/typedapi/watcher/ackwatch/response.go +++ b/typedapi/watcher/ackwatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package ackwatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package ackwatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/ack_watch/WatcherAckWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/ack_watch/WatcherAckWatchResponse.ts#L22-L24 type Response struct { Status types.WatchStatus `json:"status"` diff --git a/typedapi/watcher/activatewatch/activate_watch.go b/typedapi/watcher/activatewatch/activate_watch.go old mode 100755 new mode 100644 index d56de08fb5..6433a7411b --- a/typedapi/watcher/activatewatch/activate_watch.go +++ b/typedapi/watcher/activatewatch/activate_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Activates a currently inactive watch. package activatewatch @@ -172,7 +172,6 @@ func (r ActivateWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/activatewatch/response.go b/typedapi/watcher/activatewatch/response.go old mode 100755 new mode 100644 index 6fc3d0dc1c..a426e47baa --- a/typedapi/watcher/activatewatch/response.go +++ b/typedapi/watcher/activatewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package activatewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package activatewatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/activate_watch/WatcherActivateWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/activate_watch/WatcherActivateWatchResponse.ts#L22-L24 type Response struct { Status types.ActivationStatus `json:"status"` diff --git a/typedapi/watcher/deactivatewatch/deactivate_watch.go b/typedapi/watcher/deactivatewatch/deactivate_watch.go old mode 100755 new mode 100644 index eed5a7c026..58798486fd --- a/typedapi/watcher/deactivatewatch/deactivate_watch.go +++ b/typedapi/watcher/deactivatewatch/deactivate_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Deactivates a currently active watch. 
package deactivatewatch @@ -172,7 +172,6 @@ func (r DeactivateWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/deactivatewatch/response.go b/typedapi/watcher/deactivatewatch/response.go old mode 100755 new mode 100644 index 53c366eda2..db1e62eb2c --- a/typedapi/watcher/deactivatewatch/response.go +++ b/typedapi/watcher/deactivatewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deactivatewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deactivatewatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/deactivate_watch/DeactivateWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/deactivate_watch/DeactivateWatchResponse.ts#L22-L24 type Response struct { Status types.ActivationStatus `json:"status"` diff --git a/typedapi/watcher/deletewatch/delete_watch.go b/typedapi/watcher/deletewatch/delete_watch.go old mode 100755 new mode 100644 index a3abbe625b..c1f578f77e --- a/typedapi/watcher/deletewatch/delete_watch.go +++ b/typedapi/watcher/deletewatch/delete_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Removes a watch from Watcher. package deletewatch @@ -170,7 +170,6 @@ func (r DeleteWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/deletewatch/response.go b/typedapi/watcher/deletewatch/response.go old mode 100755 new mode 100644 index 8683deab7d..eb7d2b8d93 --- a/typedapi/watcher/deletewatch/response.go +++ b/typedapi/watcher/deletewatch/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package deletewatch // Response holds the response body struct for the package deletewatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/delete_watch/DeleteWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/delete_watch/DeleteWatchResponse.ts#L22-L24 type Response struct { Found bool `json:"found"` diff --git a/typedapi/watcher/executewatch/execute_watch.go b/typedapi/watcher/executewatch/execute_watch.go old mode 100755 new mode 100644 index eb67cfc1ef..4e75045812 --- a/typedapi/watcher/executewatch/execute_watch.go +++ b/typedapi/watcher/executewatch/execute_watch.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Forces the execution of a stored watch. package executewatch @@ -215,7 +215,6 @@ func (r ExecuteWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/executewatch/request.go b/typedapi/watcher/executewatch/request.go old mode 100755 new mode 100644 index 95d66eab0b..bda21b6a24 --- a/typedapi/watcher/executewatch/request.go +++ b/typedapi/watcher/executewatch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package executewatch @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package executewatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/execute_watch/WatcherExecuteWatchRequest.ts#L28-L80 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/execute_watch/WatcherExecuteWatchRequest.ts#L28-L80 type Request struct { // ActionModes Determines how to handle the watch actions as part of the watch execution. diff --git a/typedapi/watcher/executewatch/response.go b/typedapi/watcher/executewatch/response.go old mode 100755 new mode 100644 index d4c5c27724..af99d95b96 --- a/typedapi/watcher/executewatch/response.go +++ b/typedapi/watcher/executewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package executewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package executewatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/execute_watch/WatcherExecuteWatchResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/execute_watch/WatcherExecuteWatchResponse.ts#L23-L25 type Response struct { Id_ string `json:"_id"` diff --git a/typedapi/watcher/getwatch/get_watch.go b/typedapi/watcher/getwatch/get_watch.go old mode 100755 new mode 100644 index 47827badb9..e4577bf83c --- a/typedapi/watcher/getwatch/get_watch.go +++ b/typedapi/watcher/getwatch/get_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Retrieves a watch by its ID. 
package getwatch @@ -170,7 +170,6 @@ func (r GetWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/getwatch/response.go b/typedapi/watcher/getwatch/response.go old mode 100755 new mode 100644 index d2b657df90..c2138f0b90 --- a/typedapi/watcher/getwatch/response.go +++ b/typedapi/watcher/getwatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 package getwatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getwatch // -// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/get_watch/GetWatchResponse.ts#L24-L34 +// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/get_watch/GetWatchResponse.ts#L24-L34 type Response struct { Found bool `json:"found"` diff --git a/typedapi/watcher/putwatch/put_watch.go b/typedapi/watcher/putwatch/put_watch.go old mode 100755 new mode 100644 index 2e4ac7566e..f4ae373725 --- a/typedapi/watcher/putwatch/put_watch.go +++ b/typedapi/watcher/putwatch/put_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e +// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7 // Creates a new watch, or updates an existing one. package putwatch @@ -206,7 +206,6 @@ func (r PutWatch) Do(ctx context.Context) (*Response, error) { } return response, nil - } errorResponse := types.NewElasticsearchError() diff --git a/typedapi/watcher/putwatch/request.go b/typedapi/watcher/putwatch/request.go old mode 100755 new mode 100644 index f8c8105c4b..3432fe36e8 --- a/typedapi/watcher/putwatch/request.go +++ b/typedapi/watcher/putwatch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package putwatch
@@ -29,12 +29,12 @@ import (

 // Request holds the request body struct for the package putwatch
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/put_watch/WatcherPutWatchRequest.ts#L30-L54
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/put_watch/WatcherPutWatchRequest.ts#L30-L54
 type Request struct {
 	Actions map[string]types.WatcherAction `json:"actions,omitempty"`
 	Condition *types.WatcherCondition `json:"condition,omitempty"`
 	Input *types.WatcherInput `json:"input,omitempty"`
-	Metadata map[string]json.RawMessage `json:"metadata,omitempty"`
+	Metadata types.Metadata `json:"metadata,omitempty"`
 	ThrottlePeriod *string `json:"throttle_period,omitempty"`
 	Transform *types.TransformContainer `json:"transform,omitempty"`
 	Trigger *types.TriggerContainer `json:"trigger,omitempty"`
diff --git a/typedapi/watcher/putwatch/response.go b/typedapi/watcher/putwatch/response.go
old mode 100755
new mode 100644
index 119023cdd6..96798eee3f
--- a/typedapi/watcher/putwatch/response.go
+++ b/typedapi/watcher/putwatch/response.go
@@ -16,13 +16,13 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package putwatch

 // Response holds the response body struct for the package putwatch
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/put_watch/WatcherPutWatchResponse.ts#L23-L31
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/put_watch/WatcherPutWatchResponse.ts#L23-L31
 type Response struct {

 	Created bool `json:"created"`
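The only behavioral change in the watcher hunks above is in putwatch: Request.Metadata moves from a bare map[string]json.RawMessage to the shared types.Metadata alias. The following is a minimal sketch of how a caller might populate that field through the typed client; everything outside the diff is an assumption — the cluster address, the watch id "my_watch", and the premise that types.Metadata is still a string-keyed map of raw JSON values — and the trigger, input, condition, and actions a complete watch needs are deliberately elided.

package main

import (
	"context"
	"encoding/json"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	"github.com/elastic/go-elasticsearch/v8/typedapi/watcher/putwatch"
)

func main() {
	// Assumption: a local, unsecured cluster; adjust Config for real deployments.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatalf("creating the typed client: %s", err)
	}

	// Metadata is now typed as types.Metadata rather than a plain
	// map[string]json.RawMessage; the values remain arbitrary JSON
	// (assumption: the alias keeps that map shape).
	req := &putwatch.Request{
		Metadata: types.Metadata{
			"owner":       json.RawMessage(`"ops-team"`),
			"runbook_url": json.RawMessage(`"https://example.com/runbook"`),
		},
		// Trigger, Input, Condition and Actions are omitted for brevity;
		// a real watch requires them.
	}

	res, err := es.Watcher.PutWatch("my_watch").Request(req).Do(context.Background())
	if err != nil {
		log.Fatalf("putting the watch: %s", err)
	}
	log.Printf("created: %v", res.Created)
}

The builder chain (PutWatch(id).Request(req).Do(ctx)) mirrors the Do signature visible in the put_watch.go hunk; only the Metadata literal depends on the type change.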
diff --git a/typedapi/watcher/querywatches/query_watches.go b/typedapi/watcher/querywatches/query_watches.go
old mode 100755
new mode 100644
index 7719c4601f..0ea394eada
--- a/typedapi/watcher/querywatches/query_watches.go
+++ b/typedapi/watcher/querywatches/query_watches.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Retrieves stored watches.
 package querywatches
@@ -196,7 +196,6 @@ func (r QueryWatches) Do(ctx context.Context) (*Response, error) {
 		}

 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/watcher/querywatches/request.go b/typedapi/watcher/querywatches/request.go
old mode 100755
new mode 100644
index fc666b1ac0..26e8a004ce
--- a/typedapi/watcher/querywatches/request.go
+++ b/typedapi/watcher/querywatches/request.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package querywatches
@@ -29,7 +29,7 @@ import (

 // Request holds the request body struct for the package querywatches
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/query_watches/WatcherQueryWatchesRequest.ts#L25-L49
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/query_watches/WatcherQueryWatchesRequest.ts#L25-L49
 type Request struct {

 	// From The offset from the first result to fetch. Needs to be non-negative.
diff --git a/typedapi/watcher/querywatches/response.go b/typedapi/watcher/querywatches/response.go
old mode 100755
new mode 100644
index f819c17a50..1269aa3126
--- a/typedapi/watcher/querywatches/response.go
+++ b/typedapi/watcher/querywatches/response.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package querywatches
@@ -26,7 +26,7 @@ import (

 // Response holds the response body struct for the package querywatches
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/query_watches/WatcherQueryWatchesResponse.ts#L23-L28
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/query_watches/WatcherQueryWatchesResponse.ts#L23-L28
 type Response struct {

 	Count int `json:"count"`
diff --git a/typedapi/watcher/start/response.go b/typedapi/watcher/start/response.go
old mode 100755
new mode 100644
index 6263926021..cb2522fb24
--- a/typedapi/watcher/start/response.go
+++ b/typedapi/watcher/start/response.go
@@ -16,13 +16,13 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package start

 // Response holds the response body struct for the package start
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/start/WatcherStartResponse.ts#L22-L24
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/start/WatcherStartResponse.ts#L22-L24
 type Response struct {
diff --git a/typedapi/watcher/start/start.go b/typedapi/watcher/start/start.go
old mode 100755
new mode 100644
index 930852e36d..18d5df17ab
--- a/typedapi/watcher/start/start.go
+++ b/typedapi/watcher/start/start.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Starts Watcher if it is not already running.
 package start
@@ -159,7 +159,6 @@ func (r Start) Do(ctx context.Context) (*Response, error) {
 		}

 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/watcher/stats/response.go b/typedapi/watcher/stats/response.go
old mode 100755
new mode 100644
index 7c2a283d3d..91b586d85c
--- a/typedapi/watcher/stats/response.go
+++ b/typedapi/watcher/stats/response.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package stats
@@ -26,7 +26,7 @@ import (

 // Response holds the response body struct for the package stats
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stats/WatcherStatsResponse.ts#L24-L32
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stats/WatcherStatsResponse.ts#L24-L32
 type Response struct {

 	ClusterName string `json:"cluster_name"`
diff --git a/typedapi/watcher/stats/stats.go b/typedapi/watcher/stats/stats.go
old mode 100755
new mode 100644
index 2c879445a0..4dec72dff8
--- a/typedapi/watcher/stats/stats.go
+++ b/typedapi/watcher/stats/stats.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Retrieves the current Watcher metrics.
 package stats
@@ -176,7 +176,6 @@ func (r Stats) Do(ctx context.Context) (*Response, error) {
 		}

 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/watcher/stop/response.go b/typedapi/watcher/stop/response.go
old mode 100755
new mode 100644
index fd15fdaf05..a772c900e1
--- a/typedapi/watcher/stop/response.go
+++ b/typedapi/watcher/stop/response.go
@@ -16,13 +16,13 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package stop

 // Response holds the response body struct for the package stop
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/watcher/stop/WatcherStopResponse.ts#L22-L24
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/watcher/stop/WatcherStopResponse.ts#L22-L24
 type Response struct {
diff --git a/typedapi/watcher/stop/stop.go b/typedapi/watcher/stop/stop.go
old mode 100755
new mode 100644
index 4c63100033..d942d5b0d6
--- a/typedapi/watcher/stop/stop.go
+++ b/typedapi/watcher/stop/stop.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Stops Watcher if it is running.
 package stop
@@ -159,7 +159,6 @@ func (r Stop) Do(ctx context.Context) (*Response, error) {
 		}

 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/xpack/info/info.go b/typedapi/xpack/info/info.go
old mode 100755
new mode 100644
index 2eea0d4641..5147c58698
--- a/typedapi/xpack/info/info.go
+++ b/typedapi/xpack/info/info.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Retrieves information about the installed X-Pack features.
 package info
@@ -158,7 +158,6 @@ func (r Info) Do(ctx context.Context) (*Response, error) {
 		}

 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
diff --git a/typedapi/xpack/info/response.go b/typedapi/xpack/info/response.go
old mode 100755
new mode 100644
index dc6d75093b..8760f8ea6a
--- a/typedapi/xpack/info/response.go
+++ b/typedapi/xpack/info/response.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package info
@@ -26,7 +26,7 @@ import (

 // Response holds the response body struct for the package info
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/info/XPackInfoResponse.ts#L22-L29
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/info/XPackInfoResponse.ts#L22-L29
 type Response struct {

 	Build types.BuildInformation `json:"build"`
diff --git a/typedapi/xpack/usage/response.go b/typedapi/xpack/usage/response.go
old mode 100755
new mode 100644
index 896d96c4e3..ed1406d23a
--- a/typedapi/xpack/usage/response.go
+++ b/typedapi/xpack/usage/response.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 package usage
@@ -26,7 +26,7 @@ import (

 // Response holds the response body struct for the package usage
 //
-// https://github.com/elastic/elasticsearch-specification/blob/1ad7fe36297b3a8e187b2259dedaf68a47bc236e/specification/xpack/usage/XPackUsageResponse.ts#L43-L76
+// https://github.com/elastic/elasticsearch-specification/blob/899364a63e7415b60033ddd49d50a30369da26d7/specification/xpack/usage/XPackUsageResponse.ts#L43-L76
 type Response struct {

 	AggregateMetric types.Base `json:"aggregate_metric"`
diff --git a/typedapi/xpack/usage/usage.go b/typedapi/xpack/usage/usage.go
old mode 100755
new mode 100644
index e288b0d1e1..d0c6824f4a
--- a/typedapi/xpack/usage/usage.go
+++ b/typedapi/xpack/usage/usage.go
@@ -16,7 +16,7 @@
 // under the License.

 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/1ad7fe36297b3a8e187b2259dedaf68a47bc236e
+// https://github.com/elastic/elasticsearch-specification/tree/899364a63e7415b60033ddd49d50a30369da26d7

 // Retrieves usage information about the installed X-Pack features.
 package usage
@@ -159,7 +159,6 @@ func (r Usage) Do(ctx context.Context) (*Response, error) {
 		}

 		return response, nil
-
 	}

 	errorResponse := types.NewElasticsearchError()
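Every Do hunk in this section has the same shape — each endpoint's builder returns that package's typed Response — so a read-only round trip is a convenient way to exercise the regenerated watcher and xpack code. The sketch below builds on the typed client from the earlier example (same imports, same hypothetical watch id "my_watch"); the fields it reads (Found, ClusterName, Build) are the ones visible in the response hunks above, while the argument-less Stats() and Info() constructors are assumptions about the generated builders.

// smokeTest exercises a few of the regenerated read-only endpoints.
// Assumption: the imports and *elasticsearch.TypedClient from the previous sketch.
func smokeTest(ctx context.Context, es *elasticsearch.TypedClient) error {
	// GetWatch: Response.Found reports whether the watch exists.
	watch, err := es.Watcher.GetWatch("my_watch").Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("watch found: %v", watch.Found)

	// Watcher stats: Response.ClusterName appears in the stats hunk above.
	stats, err := es.Watcher.Stats().Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("watcher stats for cluster %q", stats.ClusterName)

	// X-Pack info: Build carries the cluster's build metadata.
	info, err := es.Xpack.Info().Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("x-pack build: %+v", info.Build)

	return nil
}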